From f0f743e7c6c3810e1336f2f293b7ba94945bb700 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Fran=C3=A7ois=20Bobot?=
Date: Fri, 16 Feb 2024 22:46:40 +0100
Subject: [PATCH] New submission (#36)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Initialize PoC solver submission

* Use the https://github.com/yxtay/python-project-template template

* Clean up template

* Initialize with poetry

* Test validation command

* [submission] Add validate, show, and submission-dir tests

* Support only Python 3.11

* Add a direct link for creating a new submission

* Execute generation in the poetry environment

* Add missing fields; list existing tracks, divisions, logics

* Pretty-print the participations of the solver

* Add conversion from CSV and a JSON schema dump

* Add a subcommand for downloading benchmarks (not tested since the
  servers are unavailable)

* Add download, fix conversion, benchexec

* Exclude archive and web from black linting.

* Fix extra HTML tag.

* Fix typo.

* Add ignore file for prettier.

* Blackify.

* Fix formatting in archive.

* Fix end-of-file newlines in archive.

* List all the web files.

* Fix benchexec

* Fix CSV conversion and use the final solver

* Compatibility with StarExec
  - one tool module installed with smtcomp
  - redefine unpack_archive to keep the executable bit (see the sketch
    after the diffstat)
  - in compa_starexec mode, execute the script locally with bash (some
    scripts don't have a #! line)

* Add a target for running the hugo server

* Specify the pip packages to install for generating the documentation

* Fix last commit

* Add the configuration cache to the code to avoid requiring internet
  access

* Add a trivial benchmark generator
  - static cache of the StarExec configuration
  - instructions for using the commands

* Add submission instructions and schema.

---------

Co-authored-by: Martin Jonáš
Co-authored-by: Martin Jonas

---
 .devcontainer/devcontainer.json               |  27 +
 .devcontainer/postCreateCommand.sh            |   7 +
 .editorconfig                                 |   5 +
 .github/actions/setup-poetry-env/action.yml   |  33 +
 .github/workflows/main.yml                    |  75 ++
 .github/workflows/validate-codecov-config.yml |  15 +
 .gitignore                                    | 179 ++-
 .pre-commit-config.yaml                       |  27 +
 .prettierignore                               |   3 +
 CONTRIBUTING.md                               | 133 ++
 Dockerfile                                    |  21 +
 LICENSE                                       |  21 +
 Makefile                                      |  54 +
 README.md                                     |  83 ++
 [diffstat truncated: several hundred additional entries under archive/2012 through archive/2021, mostly HTML pages for results, participants, divisions, news, and tools, each touched by small formatting changes of roughly 7 to 11 lines, plus a few .md, .txt, and C tool sources]
| 131 +- archive/2021/results/nia-cloud.html | 27 +- archive/2021/results/nia-parallel.html | 27 +- archive/2021/results/nia-single-query.html | 121 +- archive/2021/results/nia-unsat-core.html | 35 +- archive/2021/results/nra-cloud.html | 35 +- archive/2021/results/nra-parallel.html | 35 +- archive/2021/results/nra-single-query.html | 131 +- archive/2021/results/qf-abv-cloud.html | 27 +- archive/2021/results/qf-abv-incremental.html | 29 +- archive/2021/results/qf-abv-parallel.html | 27 +- archive/2021/results/qf-abv-single-query.html | 101 +- archive/2021/results/qf-abv-unsat-core.html | 43 +- .../2021/results/qf-abvfp-incremental.html | 27 +- .../2021/results/qf-abvfp-single-query.html | 121 +- archive/2021/results/qf-abvfp-unsat-core.html | 35 +- .../results/qf-abvfplra-single-query.html | 101 +- archive/2021/results/qf-alia-incremental.html | 29 +- .../2021/results/qf-alia-single-query.html | 101 +- archive/2021/results/qf-alia-unsat-core.html | 51 +- archive/2021/results/qf-ania-incremental.html | 25 +- .../2021/results/qf-ania-single-query.html | 81 +- archive/2021/results/qf-ania-unsat-core.html | 27 +- .../2021/results/qf-aufbv-incremental.html | 29 +- .../2021/results/qf-aufbv-single-query.html | 101 +- archive/2021/results/qf-aufbv-unsat-core.html | 43 +- .../2021/results/qf-aufbvfp-single-query.html | 71 +- .../2021/results/qf-auflia-incremental.html | 29 +- .../2021/results/qf-auflia-single-query.html | 101 +- .../2021/results/qf-auflia-unsat-core.html | 51 +- .../2021/results/qf-aufnia-single-query.html | 81 +- .../2021/results/qf-aufnia-unsat-core.html | 27 +- archive/2021/results/qf-ax-single-query.html | 101 +- archive/2021/results/qf-ax-unsat-core.html | 47 +- archive/2021/results/qf-bitvec-cloud.html | 35 +- .../2021/results/qf-bitvec-incremental.html | 29 +- .../results/qf-bitvec-model-validation.html | 43 +- archive/2021/results/qf-bitvec-parallel.html | 35 +- .../2021/results/qf-bitvec-single-query.html | 91 +- .../2021/results/qf-bitvec-unsat-core.html | 39 +- archive/2021/results/qf-bv-cloud.html | 35 +- archive/2021/results/qf-bv-incremental.html | 29 +- .../2021/results/qf-bv-model-validation.html | 43 +- archive/2021/results/qf-bv-parallel.html | 35 +- archive/2021/results/qf-bv-single-query.html | 91 +- archive/2021/results/qf-bv-unsat-core.html | 39 +- archive/2021/results/qf-bvfp-cloud.html | 27 +- archive/2021/results/qf-bvfp-incremental.html | 29 +- archive/2021/results/qf-bvfp-parallel.html | 27 +- .../2021/results/qf-bvfp-single-query.html | 131 +- archive/2021/results/qf-bvfp-unsat-core.html | 39 +- .../2021/results/qf-bvfplra-single-query.html | 101 +- .../2021/results/qf-bvfplra-unsat-core.html | 27 +- archive/2021/results/qf-dt-single-query.html | 61 +- archive/2021/results/qf-dt-unsat-core.html | 31 +- .../results/qf-equality-bitvec-cloud.html | 27 +- .../qf-equality-bitvec-incremental.html | 29 +- .../qf-equality-bitvec-model-validation.html | 31 +- .../results/qf-equality-bitvec-parallel.html | 27 +- .../qf-equality-bitvec-single-query.html | 101 +- .../qf-equality-bitvec-unsat-core.html | 43 +- .../2021/results/qf-equality-incremental.html | 29 +- .../qf-equality-lineararith-incremental.html | 31 +- ...equality-lineararith-model-validation.html | 31 +- .../qf-equality-lineararith-single-query.html | 131 +- .../qf-equality-lineararith-unsat-core.html | 51 +- .../results/qf-equality-model-validation.html | 35 +- .../qf-equality-nonlineararith-cloud.html | 27 +- ...f-equality-nonlineararith-incremental.html | 27 +- 
.../qf-equality-nonlineararith-parallel.html | 27 +- ...-equality-nonlineararith-single-query.html | 121 +- ...qf-equality-nonlineararith-unsat-core.html | 31 +- .../results/qf-equality-single-query.html | 131 +- .../2021/results/qf-equality-unsat-core.html | 47 +- archive/2021/results/qf-fp-cloud.html | 27 +- archive/2021/results/qf-fp-incremental.html | 29 +- archive/2021/results/qf-fp-parallel.html | 27 +- archive/2021/results/qf-fp-single-query.html | 131 +- archive/2021/results/qf-fp-unsat-core.html | 39 +- archive/2021/results/qf-fparith-cloud.html | 27 +- .../2021/results/qf-fparith-incremental.html | 29 +- archive/2021/results/qf-fparith-parallel.html | 27 +- .../2021/results/qf-fparith-single-query.html | 141 ++- .../2021/results/qf-fparith-unsat-core.html | 39 +- .../2021/results/qf-fplra-single-query.html | 111 +- archive/2021/results/qf-idl-cloud.html | 43 +- .../2021/results/qf-idl-model-validation.html | 55 +- archive/2021/results/qf-idl-parallel.html | 27 +- archive/2021/results/qf-idl-single-query.html | 131 +- archive/2021/results/qf-idl-unsat-core.html | 43 +- archive/2021/results/qf-lia-cloud.html | 43 +- archive/2021/results/qf-lia-incremental.html | 29 +- .../2021/results/qf-lia-model-validation.html | 51 +- archive/2021/results/qf-lia-parallel.html | 27 +- archive/2021/results/qf-lia-single-query.html | 121 +- archive/2021/results/qf-lia-unsat-core.html | 43 +- .../2021/results/qf-linearintarith-cloud.html | 43 +- .../qf-linearintarith-incremental.html | 29 +- .../qf-linearintarith-model-validation.html | 55 +- .../results/qf-linearintarith-parallel.html | 27 +- .../qf-linearintarith-single-query.html | 131 +- .../results/qf-linearintarith-unsat-core.html | 43 +- .../results/qf-linearrealarith-cloud.html | 43 +- .../qf-linearrealarith-incremental.html | 29 +- .../qf-linearrealarith-model-validation.html | 47 +- .../results/qf-linearrealarith-parallel.html | 27 +- .../qf-linearrealarith-single-query.html | 131 +- .../qf-linearrealarith-unsat-core.html | 43 +- .../results/qf-lira-model-validation.html | 47 +- .../2021/results/qf-lira-single-query.html | 101 +- archive/2021/results/qf-lira-unsat-core.html | 43 +- archive/2021/results/qf-lra-cloud.html | 43 +- archive/2021/results/qf-lra-incremental.html | 29 +- .../2021/results/qf-lra-model-validation.html | 47 +- archive/2021/results/qf-lra-parallel.html | 27 +- archive/2021/results/qf-lra-single-query.html | 121 +- archive/2021/results/qf-lra-unsat-core.html | 43 +- archive/2021/results/qf-nia-cloud.html | 27 +- archive/2021/results/qf-nia-incremental.html | 27 +- archive/2021/results/qf-nia-parallel.html | 27 +- archive/2021/results/qf-nia-single-query.html | 111 +- archive/2021/results/qf-nia-unsat-core.html | 31 +- .../2021/results/qf-nira-single-query.html | 91 +- archive/2021/results/qf-nira-unsat-core.html | 31 +- .../results/qf-nonlinearintarith-cloud.html | 27 +- .../qf-nonlinearintarith-incremental.html | 27 +- .../qf-nonlinearintarith-parallel.html | 27 +- .../qf-nonlinearintarith-single-query.html | 111 +- .../qf-nonlinearintarith-unsat-core.html | 31 +- .../results/qf-nonlinearrealarith-cloud.html | 27 +- .../qf-nonlinearrealarith-parallel.html | 27 +- .../qf-nonlinearrealarith-single-query.html | 91 +- .../qf-nonlinearrealarith-unsat-core.html | 31 +- archive/2021/results/qf-nra-cloud.html | 27 +- archive/2021/results/qf-nra-parallel.html | 27 +- archive/2021/results/qf-nra-single-query.html | 91 +- archive/2021/results/qf-nra-unsat-core.html | 31 +- archive/2021/results/qf-rdl-cloud.html | 35 +- 
.../2021/results/qf-rdl-model-validation.html | 43 +- archive/2021/results/qf-rdl-single-query.html | 101 +- archive/2021/results/qf-s-single-query.html | 71 +- .../2021/results/qf-slia-single-query.html | 71 +- .../2021/results/qf-snia-single-query.html | 51 +- .../2021/results/qf-strings-single-query.html | 71 +- archive/2021/results/qf-uf-incremental.html | 29 +- .../2021/results/qf-uf-model-validation.html | 39 +- archive/2021/results/qf-uf-single-query.html | 131 +- archive/2021/results/qf-uf-unsat-core.html | 47 +- archive/2021/results/qf-ufbv-incremental.html | 29 +- .../results/qf-ufbv-model-validation.html | 35 +- .../2021/results/qf-ufbv-single-query.html | 101 +- archive/2021/results/qf-ufbv-unsat-core.html | 43 +- .../2021/results/qf-ufdt-single-query.html | 51 +- archive/2021/results/qf-ufdt-unsat-core.html | 31 +- .../results/qf-ufdtlira-single-query.html | 51 +- .../2021/results/qf-ufdtlira-unsat-core.html | 27 +- archive/2021/results/qf-uffp-incremental.html | 27 +- archive/2021/results/qf-uffp-unsat-core.html | 35 +- .../results/qf-uffpdtlira-single-query.html | 41 +- .../results/qf-uffpdtlira-unsat-core.html | 19 +- .../results/qf-ufidl-model-validation.html | 35 +- .../2021/results/qf-ufidl-single-query.html | 101 +- archive/2021/results/qf-ufidl-unsat-core.html | 51 +- .../2021/results/qf-uflia-incremental.html | 29 +- .../results/qf-uflia-model-validation.html | 35 +- .../2021/results/qf-uflia-single-query.html | 101 +- archive/2021/results/qf-uflia-unsat-core.html | 51 +- .../2021/results/qf-uflra-incremental.html | 27 +- .../results/qf-uflra-model-validation.html | 35 +- .../2021/results/qf-uflra-single-query.html | 111 +- archive/2021/results/qf-uflra-unsat-core.html | 51 +- .../2021/results/qf-ufnia-incremental.html | 27 +- .../2021/results/qf-ufnia-single-query.html | 91 +- archive/2021/results/qf-ufnia-unsat-core.html | 31 +- archive/2021/results/qf-ufnra-cloud.html | 27 +- .../2021/results/qf-ufnra-incremental.html | 23 +- archive/2021/results/qf-ufnra-parallel.html | 27 +- .../2021/results/qf-ufnra-single-query.html | 101 +- archive/2021/results/qf-ufnra-unsat-core.html | 31 +- archive/2021/results/results-cloud.html | 7 +- archive/2021/results/results-incremental.html | 7 +- .../results/results-model-validation.html | 7 +- archive/2021/results/results-parallel.html | 7 +- .../2021/results/results-single-query.html | 7 +- archive/2021/results/results-unsat-core.html | 7 +- archive/2021/results/uf-cloud.html | 35 +- archive/2021/results/uf-incremental.html | 25 +- archive/2021/results/uf-parallel.html | 35 +- archive/2021/results/uf-single-query.html | 151 ++- archive/2021/results/uf-unsat-core.html | 43 +- archive/2021/results/ufbv-cloud.html | 27 +- archive/2021/results/ufbv-parallel.html | 27 +- archive/2021/results/ufbv-single-query.html | 91 +- archive/2021/results/ufbvfp-single-query.html | 51 +- .../2021/results/ufbvlia-single-query.html | 51 +- archive/2021/results/ufdt-cloud.html | 27 +- archive/2021/results/ufdt-parallel.html | 27 +- archive/2021/results/ufdt-single-query.html | 101 +- archive/2021/results/ufdt-unsat-core.html | 27 +- archive/2021/results/ufdtlia-cloud.html | 27 +- archive/2021/results/ufdtlia-parallel.html | 27 +- .../2021/results/ufdtlia-single-query.html | 101 +- archive/2021/results/ufdtlira-cloud.html | 27 +- archive/2021/results/ufdtlira-parallel.html | 27 +- .../2021/results/ufdtlira-single-query.html | 91 +- archive/2021/results/ufdtlira-unsat-core.html | 27 +- archive/2021/results/ufdtnia-incremental.html | 17 +- 
.../2021/results/ufdtnia-single-query.html | 91 +- archive/2021/results/ufdtnira-cloud.html | 27 +- archive/2021/results/ufdtnira-parallel.html | 27 +- .../2021/results/ufdtnira-single-query.html | 91 +- archive/2021/results/ufdtnira-unsat-core.html | 27 +- .../2021/results/uffpdtlira-single-query.html | 51 +- .../2021/results/uffpdtlira-unsat-core.html | 23 +- .../2021/results/uffpdtnira-single-query.html | 51 +- .../2021/results/uffpdtnira-unsat-core.html | 23 +- archive/2021/results/ufidl-cloud.html | 35 +- archive/2021/results/ufidl-parallel.html | 35 +- archive/2021/results/ufidl-single-query.html | 151 ++- archive/2021/results/ufidl-unsat-core.html | 47 +- archive/2021/results/uflia-cloud.html | 35 +- archive/2021/results/uflia-parallel.html | 35 +- archive/2021/results/uflia-single-query.html | 151 ++- archive/2021/results/uflia-unsat-core.html | 47 +- archive/2021/results/uflra-cloud.html | 27 +- archive/2021/results/uflra-incremental.html | 27 +- archive/2021/results/uflra-parallel.html | 27 +- archive/2021/results/uflra-single-query.html | 151 ++- archive/2021/results/uflra-unsat-core.html | 47 +- archive/2021/results/ufnia-cloud.html | 35 +- archive/2021/results/ufnia-incremental.html | 27 +- archive/2021/results/ufnia-parallel.html | 35 +- archive/2021/results/ufnia-single-query.html | 131 +- archive/2021/results/ufnia-unsat-core.html | 39 +- archive/2021/results/ufnra-incremental.html | 27 +- archive/2021/specs.html | 7 +- archive/2021/system-descriptions/mc2.html | 7 +- archive/2021/tools.html | 49 +- archive/2022/benchmarks.html | 7 +- archive/2022/comparisons.html | 7 +- archive/2022/divisions/arith.html | 7 +- archive/2022/divisions/bitvec.html | 7 +- .../2022/divisions/equality-lineararith.html | 7 +- .../2022/divisions/equality-machinearith.html | 7 +- .../divisions/equality-nonlineararith.html | 7 +- archive/2022/divisions/equality.html | 7 +- archive/2022/divisions/fparith.html | 7 +- archive/2022/divisions/qf-bitvec.html | 7 +- archive/2022/divisions/qf-datatypes.html | 7 +- .../divisions/qf-equality-bitvec-arith.html | 7 +- .../2022/divisions/qf-equality-bitvec.html | 7 +- .../divisions/qf-equality-lineararith.html | 7 +- .../divisions/qf-equality-nonlineararith.html | 7 +- archive/2022/divisions/qf-equality.html | 7 +- archive/2022/divisions/qf-fparith.html | 7 +- archive/2022/divisions/qf-linearintarith.html | 7 +- .../2022/divisions/qf-linearrealarith.html | 7 +- .../2022/divisions/qf-nonlinearintarith.html | 7 +- .../2022/divisions/qf-nonlinearrealarith.html | 7 +- archive/2022/divisions/qf-strings.html | 7 +- archive/2022/index.html | 7 +- .../news/2022-02-09-call-for-benchmark.html | 7 +- .../news/2022-02-22-call-for-comments.html | 7 +- .../news/2022-03-22-call-for-solvers.html | 7 +- .../2022-06-01-final-call-for-solvers.html | 7 +- .../news/2022-08-10-competition-results.html | 7 +- archive/2022/parallel-and-cloud-tracks.html | 7 +- archive/2022/participants.html | 7 +- .../2018-mathsat-incremental.html | 8 +- archive/2022/participants/2019-cvc4-inc.html | 8 +- archive/2022/participants/2019-par4.html | 8 +- archive/2022/participants/2019-z3.html | 8 +- .../participants/2020-bitwuzla-fixed.html | 8 +- archive/2022/participants/2020-bitwuzla.html | 8 +- archive/2022/participants/2020-cvc4-uc.html | 8 +- archive/2022/participants/2020-cvc4.html | 8 +- .../participants/2020-yices2-incremental.html | 8 +- archive/2022/participants/2020-yices2.html | 8 +- archive/2022/participants/2020-z3.html | 8 +- .../participants/2021-bitwuzla-fixed.html | 8 +- 
archive/2022/participants/2021-bitwuzla.html | 8 +- archive/2022/participants/2021-cvc5-inc.html | 8 +- archive/2022/participants/2021-cvc5-uc.html | 8 +- archive/2022/participants/2021-cvc5.html | 8 +- archive/2022/participants/2021-mathsat5.html | 8 +- .../2022/participants/2021-smtinterpol.html | 8 +- archive/2022/participants/2021-stp.html | 8 +- .../participants/2021-yices2-incremental.html | 8 +- .../2021-yices2-model-validation.html | 8 +- archive/2022/participants/2021-yices2.html | 8 +- archive/2022/participants/2021-z3.html | 8 +- archive/2022/participants/bitwuzla.html | 8 +- archive/2022/participants/colibri.html | 8 +- archive/2022/participants/cvc5-cloud.html | 8 +- archive/2022/participants/cvc5-lfsc.html | 8 +- archive/2022/participants/cvc5.html | 8 +- archive/2022/participants/mathsat.html | 8 +- archive/2022/participants/nra-ls.html | 8 +- archive/2022/participants/opensmt-fixed.html | 8 +- archive/2022/participants/opensmt.html | 8 +- archive/2022/participants/ostrich.html | 8 +- archive/2022/participants/q3b-pbdd.html | 8 +- archive/2022/participants/q3b.html | 8 +- .../participants/smt-rat-mcsat-22-06.html | 8 +- .../2022/participants/smtinterpol-fixed.html | 8 +- archive/2022/participants/smtinterpol.html | 8 +- .../participants/smts-cube-and-conquer.html | 8 +- archive/2022/participants/smts-portfolio.html | 8 +- archive/2022/participants/solsmt.html | 8 +- archive/2022/participants/stp-fixed.html | 8 +- archive/2022/participants/stp.html | 8 +- .../ultimateeliminator-mathsat.html | 8 +- archive/2022/participants/vampire.html | 8 +- .../2022/participants/verit-rasat-redlog.html | 8 +- archive/2022/participants/verit.html | 8 +- .../2022/participants/yices-ismt-fixed.html | 8 +- archive/2022/participants/yices-ismt.html | 8 +- archive/2022/participants/yices2.html | 8 +- archive/2022/participants/yicesqs.html | 8 +- archive/2022/participants/z3-4-8-17.html | 8 +- archive/2022/participants/z3-bv.html | 8 +- archive/2022/participants/z3-fixed.html | 8 +- archive/2022/participants/z3.html | 8 +- archive/2022/participants/z3str4.html | 8 +- archive/2022/proof-track.html | 7 +- archive/2022/results.html | 227 ++-- .../2022/results/abv-proof-exhibition.html | 19 +- archive/2022/results/abv-single-query.html | 71 +- archive/2022/results/abv-unsat-core.html | 31 +- .../2022/results/abvfp-proof-exhibition.html | 19 +- archive/2022/results/abvfp-single-query.html | 71 +- archive/2022/results/abvfp-unsat-core.html | 31 +- .../2022/results/abvfplra-incremental.html | 21 +- .../results/abvfplra-proof-exhibition.html | 19 +- .../2022/results/abvfplra-single-query.html | 71 +- archive/2022/results/abvfplra-unsat-core.html | 31 +- archive/2022/results/alia-incremental.html | 25 +- .../2022/results/alia-proof-exhibition.html | 27 +- archive/2022/results/alia-single-query.html | 101 +- archive/2022/results/ania-incremental.html | 21 +- archive/2022/results/arith-cloud.html | 27 +- archive/2022/results/arith-incremental.html | 25 +- archive/2022/results/arith-parallel.html | 19 +- .../2022/results/arith-proof-exhibition.html | 27 +- archive/2022/results/arith-single-query.html | 101 +- archive/2022/results/arith-unsat-core.html | 39 +- .../2022/results/aufbv-proof-exhibition.html | 19 +- archive/2022/results/aufbv-single-query.html | 71 +- .../results/aufbvdtlia-proof-exhibition.html | 19 +- .../2022/results/aufbvdtlia-single-query.html | 61 +- .../2022/results/aufbvdtlia-unsat-core.html | 31 +- .../results/aufbvdtnia-proof-exhibition.html | 19 +- 
.../2022/results/aufbvdtnia-single-query.html | 61 +- .../2022/results/aufbvdtnia-unsat-core.html | 31 +- .../results/aufbvdtnira-proof-exhibition.html | 19 +- .../results/aufbvdtnira-single-query.html | 51 +- .../2022/results/aufbvdtnira-unsat-core.html | 27 +- .../results/aufbvfp-proof-exhibition.html | 19 +- .../2022/results/aufbvfp-single-query.html | 71 +- .../results/aufdtlia-proof-exhibition.html | 23 +- .../2022/results/aufdtlia-single-query.html | 91 +- archive/2022/results/aufdtlia-unsat-core.html | 39 +- archive/2022/results/aufdtlira-cloud.html | 27 +- archive/2022/results/aufdtlira-parallel.html | 19 +- .../results/aufdtlira-proof-exhibition.html | 23 +- .../2022/results/aufdtlira-single-query.html | 91 +- .../2022/results/aufdtlira-unsat-core.html | 39 +- archive/2022/results/aufdtnira-cloud.html | 27 +- archive/2022/results/aufdtnira-parallel.html | 19 +- .../results/aufdtnira-proof-exhibition.html | 19 +- .../2022/results/aufdtnira-single-query.html | 71 +- .../2022/results/aufdtnira-unsat-core.html | 35 +- .../results/auffpdtnira-proof-exhibition.html | 19 +- .../results/auffpdtnira-single-query.html | 51 +- .../2022/results/auffpdtnira-unsat-core.html | 27 +- archive/2022/results/auflia-cloud.html | 27 +- archive/2022/results/auflia-parallel.html | 19 +- .../2022/results/auflia-proof-exhibition.html | 27 +- archive/2022/results/auflia-single-query.html | 101 +- archive/2022/results/auflia-unsat-core.html | 39 +- archive/2022/results/auflira-cloud.html | 27 +- archive/2022/results/auflira-parallel.html | 19 +- .../results/auflira-proof-exhibition.html | 27 +- .../2022/results/auflira-single-query.html | 101 +- archive/2022/results/auflira-unsat-core.html | 39 +- .../2022/results/aufnia-proof-exhibition.html | 19 +- archive/2022/results/aufnia-single-query.html | 71 +- archive/2022/results/aufnia-unsat-core.html | 35 +- archive/2022/results/aufnira-cloud.html | 27 +- archive/2022/results/aufnira-incremental.html | 25 +- archive/2022/results/aufnira-parallel.html | 19 +- .../results/aufnira-proof-exhibition.html | 19 +- .../2022/results/aufnira-single-query.html | 71 +- archive/2022/results/aufnira-unsat-core.html | 35 +- .../results/biggest-lead-incremental.html | 67 +- .../biggest-lead-model-validation.html | 71 +- .../results/biggest-lead-single-query.html | 257 ++-- .../2022/results/biggest-lead-unsat-core.html | 103 +- archive/2022/results/bitvec-incremental.html | 25 +- .../2022/results/bitvec-proof-exhibition.html | 19 +- archive/2022/results/bitvec-single-query.html | 101 +- archive/2022/results/bitvec-unsat-core.html | 31 +- archive/2022/results/bv-incremental.html | 25 +- archive/2022/results/bv-proof-exhibition.html | 19 +- archive/2022/results/bv-single-query.html | 101 +- archive/2022/results/bv-unsat-core.html | 31 +- archive/2022/results/bvfp-incremental.html | 23 +- .../2022/results/bvfp-proof-exhibition.html | 19 +- archive/2022/results/bvfp-single-query.html | 71 +- archive/2022/results/bvfp-unsat-core.html | 31 +- archive/2022/results/bvfplra-incremental.html | 21 +- .../results/bvfplra-proof-exhibition.html | 19 +- .../2022/results/bvfplra-single-query.html | 71 +- archive/2022/results/bvfplra-unsat-core.html | 31 +- archive/2022/results/equality-cloud.html | 27 +- .../2022/results/equality-incremental.html | 25 +- .../results/equality-lineararith-cloud.html | 27 +- .../equality-lineararith-incremental.html | 25 +- .../equality-lineararith-parallel.html | 19 +- ...equality-lineararith-proof-exhibition.html | 27 +- 
.../equality-lineararith-single-query.html | 101 +- .../equality-lineararith-unsat-core.html | 39 +- .../equality-machinearith-incremental.html | 21 +- ...quality-machinearith-proof-exhibition.html | 19 +- .../equality-machinearith-single-query.html | 71 +- .../equality-machinearith-unsat-core.html | 31 +- .../equality-nonlineararith-cloud.html | 27 +- .../equality-nonlineararith-incremental.html | 25 +- .../equality-nonlineararith-parallel.html | 19 +- ...ality-nonlineararith-proof-exhibition.html | 19 +- .../equality-nonlineararith-single-query.html | 71 +- .../equality-nonlineararith-unsat-core.html | 35 +- archive/2022/results/equality-parallel.html | 19 +- .../results/equality-proof-exhibition.html | 27 +- .../2022/results/equality-single-query.html | 101 +- archive/2022/results/equality-unsat-core.html | 39 +- archive/2022/results/fp-proof-exhibition.html | 19 +- archive/2022/results/fp-single-query.html | 71 +- archive/2022/results/fparith-incremental.html | 23 +- .../results/fparith-proof-exhibition.html | 19 +- .../2022/results/fparith-single-query.html | 71 +- archive/2022/results/fparith-unsat-core.html | 31 +- .../2022/results/fplra-proof-exhibition.html | 19 +- archive/2022/results/fplra-single-query.html | 71 +- archive/2022/results/fplra-unsat-core.html | 31 +- .../largest-contribution-incremental.html | 65 +- ...largest-contribution-model-validation.html | 67 +- .../largest-contribution-single-query.html | 241 ++-- .../largest-contribution-unsat-core.html | 83 +- archive/2022/results/lia-incremental.html | 25 +- .../2022/results/lia-proof-exhibition.html | 27 +- archive/2022/results/lia-single-query.html | 101 +- archive/2022/results/lia-unsat-core.html | 39 +- archive/2022/results/lra-cloud.html | 27 +- archive/2022/results/lra-incremental.html | 25 +- archive/2022/results/lra-parallel.html | 19 +- .../2022/results/lra-proof-exhibition.html | 23 +- archive/2022/results/lra-single-query.html | 91 +- .../2022/results/nia-proof-exhibition.html | 19 +- archive/2022/results/nia-single-query.html | 81 +- archive/2022/results/nia-unsat-core.html | 35 +- archive/2022/results/nra-cloud.html | 27 +- archive/2022/results/nra-parallel.html | 19 +- .../2022/results/nra-proof-exhibition.html | 19 +- archive/2022/results/nra-single-query.html | 81 +- archive/2022/results/qf-abv-incremental.html | 27 +- .../2022/results/qf-abv-proof-exhibition.html | 19 +- archive/2022/results/qf-abv-single-query.html | 81 +- archive/2022/results/qf-abv-unsat-core.html | 39 +- .../2022/results/qf-abvfp-incremental.html | 23 +- .../results/qf-abvfp-proof-exhibition.html | 19 +- .../2022/results/qf-abvfp-single-query.html | 81 +- archive/2022/results/qf-abvfp-unsat-core.html | 35 +- .../2022/results/qf-abvfplra-incremental.html | 21 +- .../results/qf-abvfplra-proof-exhibition.html | 19 +- .../results/qf-abvfplra-single-query.html | 81 +- .../2022/results/qf-abvfplra-unsat-core.html | 35 +- archive/2022/results/qf-alia-incremental.html | 27 +- .../results/qf-alia-proof-exhibition.html | 27 +- .../2022/results/qf-alia-single-query.html | 91 +- archive/2022/results/qf-alia-unsat-core.html | 39 +- archive/2022/results/qf-ania-incremental.html | 25 +- .../results/qf-ania-proof-exhibition.html | 23 +- .../2022/results/qf-ania-single-query.html | 71 +- archive/2022/results/qf-ania-unsat-core.html | 35 +- .../2022/results/qf-aufbv-incremental.html | 27 +- .../results/qf-aufbv-proof-exhibition.html | 19 +- .../2022/results/qf-aufbv-single-query.html | 81 +- archive/2022/results/qf-aufbv-unsat-core.html | 39 +- 
.../2022/results/qf-aufbvfp-single-query.html | 71 +- .../2022/results/qf-auflia-incremental.html | 27 +- .../results/qf-auflia-proof-exhibition.html | 27 +- .../2022/results/qf-auflia-single-query.html | 91 +- .../2022/results/qf-auflia-unsat-core.html | 39 +- .../results/qf-aufnia-proof-exhibition.html | 23 +- .../2022/results/qf-aufnia-single-query.html | 71 +- .../2022/results/qf-aufnia-unsat-core.html | 35 +- .../2022/results/qf-ax-proof-exhibition.html | 23 +- archive/2022/results/qf-ax-single-query.html | 101 +- archive/2022/results/qf-ax-unsat-core.html | 39 +- .../2022/results/qf-bitvec-incremental.html | 29 +- .../results/qf-bitvec-model-validation.html | 47 +- .../results/qf-bitvec-proof-exhibition.html | 19 +- .../2022/results/qf-bitvec-single-query.html | 111 +- .../2022/results/qf-bitvec-unsat-core.html | 39 +- archive/2022/results/qf-bv-incremental.html | 29 +- .../2022/results/qf-bv-model-validation.html | 47 +- .../2022/results/qf-bv-proof-exhibition.html | 19 +- archive/2022/results/qf-bv-single-query.html | 111 +- archive/2022/results/qf-bv-unsat-core.html | 39 +- archive/2022/results/qf-bvfp-incremental.html | 25 +- .../results/qf-bvfp-model-validation.html | 31 +- .../results/qf-bvfp-proof-exhibition.html | 19 +- .../2022/results/qf-bvfp-single-query.html | 81 +- archive/2022/results/qf-bvfp-unsat-core.html | 35 +- .../2022/results/qf-bvfplra-incremental.html | 21 +- .../results/qf-bvfplra-model-validation.html | 31 +- .../results/qf-bvfplra-proof-exhibition.html | 19 +- .../2022/results/qf-bvfplra-single-query.html | 81 +- .../2022/results/qf-bvfplra-unsat-core.html | 35 +- .../qf-datatypes-proof-exhibition.html | 23 +- .../results/qf-datatypes-single-query.html | 61 +- .../2022/results/qf-datatypes-unsat-core.html | 31 +- .../2022/results/qf-dt-proof-exhibition.html | 23 +- archive/2022/results/qf-dt-single-query.html | 61 +- archive/2022/results/qf-dt-unsat-core.html | 31 +- .../qf-equality-bitvec-incremental.html | 27 +- .../qf-equality-bitvec-model-validation.html | 39 +- .../qf-equality-bitvec-proof-exhibition.html | 19 +- .../qf-equality-bitvec-single-query.html | 81 +- .../qf-equality-bitvec-unsat-core.html | 39 +- .../2022/results/qf-equality-incremental.html | 29 +- .../qf-equality-lineararith-cloud.html | 43 +- .../qf-equality-lineararith-incremental.html | 29 +- ...equality-lineararith-model-validation.html | 43 +- .../qf-equality-lineararith-parallel.html | 35 +- ...equality-lineararith-proof-exhibition.html | 27 +- .../qf-equality-lineararith-single-query.html | 101 +- .../qf-equality-lineararith-unsat-core.html | 39 +- .../results/qf-equality-model-validation.html | 43 +- ...f-equality-nonlineararith-incremental.html | 27 +- ...ality-nonlineararith-proof-exhibition.html | 23 +- ...-equality-nonlineararith-single-query.html | 91 +- ...qf-equality-nonlineararith-unsat-core.html | 35 +- .../results/qf-equality-proof-exhibition.html | 31 +- .../results/qf-equality-single-query.html | 111 +- .../2022/results/qf-equality-unsat-core.html | 39 +- archive/2022/results/qf-fp-incremental.html | 25 +- .../2022/results/qf-fp-model-validation.html | 31 +- .../2022/results/qf-fp-proof-exhibition.html | 19 +- archive/2022/results/qf-fp-single-query.html | 81 +- archive/2022/results/qf-fp-unsat-core.html | 35 +- .../2022/results/qf-fparith-incremental.html | 25 +- .../results/qf-fparith-model-validation.html | 27 +- .../results/qf-fparith-proof-exhibition.html | 19 +- .../2022/results/qf-fparith-single-query.html | 81 +- .../2022/results/qf-fparith-unsat-core.html | 35 
+- .../2022/results/qf-fplra-incremental.html | 23 +- .../results/qf-fplra-model-validation.html | 31 +- .../results/qf-fplra-proof-exhibition.html | 19 +- .../2022/results/qf-fplra-single-query.html | 81 +- archive/2022/results/qf-idl-cloud.html | 43 +- .../2022/results/qf-idl-model-validation.html | 47 +- archive/2022/results/qf-idl-parallel.html | 35 +- .../2022/results/qf-idl-proof-exhibition.html | 27 +- archive/2022/results/qf-idl-single-query.html | 111 +- archive/2022/results/qf-idl-unsat-core.html | 39 +- archive/2022/results/qf-lia-cloud.html | 43 +- archive/2022/results/qf-lia-incremental.html | 29 +- .../2022/results/qf-lia-model-validation.html | 47 +- archive/2022/results/qf-lia-parallel.html | 35 +- .../2022/results/qf-lia-proof-exhibition.html | 31 +- archive/2022/results/qf-lia-single-query.html | 111 +- archive/2022/results/qf-lia-unsat-core.html | 39 +- .../2022/results/qf-linearintarith-cloud.html | 43 +- .../qf-linearintarith-incremental.html | 29 +- .../qf-linearintarith-model-validation.html | 47 +- .../results/qf-linearintarith-parallel.html | 35 +- .../qf-linearintarith-proof-exhibition.html | 31 +- .../qf-linearintarith-single-query.html | 111 +- .../results/qf-linearintarith-unsat-core.html | 39 +- .../results/qf-linearrealarith-cloud.html | 43 +- .../qf-linearrealarith-incremental.html | 31 +- .../qf-linearrealarith-model-validation.html | 43 +- .../results/qf-linearrealarith-parallel.html | 35 +- .../qf-linearrealarith-proof-exhibition.html | 31 +- .../qf-linearrealarith-single-query.html | 111 +- .../qf-linearrealarith-unsat-core.html | 39 +- .../results/qf-lira-model-validation.html | 39 +- .../results/qf-lira-proof-exhibition.html | 27 +- .../2022/results/qf-lira-single-query.html | 91 +- archive/2022/results/qf-lira-unsat-core.html | 39 +- archive/2022/results/qf-lra-cloud.html | 43 +- archive/2022/results/qf-lra-incremental.html | 31 +- .../2022/results/qf-lra-model-validation.html | 43 +- archive/2022/results/qf-lra-parallel.html | 35 +- .../2022/results/qf-lra-proof-exhibition.html | 31 +- archive/2022/results/qf-lra-single-query.html | 111 +- archive/2022/results/qf-lra-unsat-core.html | 39 +- archive/2022/results/qf-nia-incremental.html | 27 +- .../2022/results/qf-nia-proof-exhibition.html | 19 +- archive/2022/results/qf-nia-single-query.html | 111 +- .../results/qf-nira-proof-exhibition.html | 19 +- .../2022/results/qf-nira-single-query.html | 61 +- .../qf-nonlinearintarith-incremental.html | 27 +- ...qf-nonlinearintarith-proof-exhibition.html | 19 +- .../qf-nonlinearintarith-single-query.html | 111 +- ...f-nonlinearrealarith-proof-exhibition.html | 19 +- .../qf-nonlinearrealarith-single-query.html | 121 +- .../2022/results/qf-nra-proof-exhibition.html | 19 +- archive/2022/results/qf-nra-single-query.html | 121 +- archive/2022/results/qf-rdl-cloud.html | 43 +- .../2022/results/qf-rdl-model-validation.html | 43 +- archive/2022/results/qf-rdl-parallel.html | 35 +- .../2022/results/qf-rdl-proof-exhibition.html | 27 +- archive/2022/results/qf-rdl-single-query.html | 111 +- .../2022/results/qf-s-proof-exhibition.html | 19 +- archive/2022/results/qf-s-single-query.html | 71 +- .../results/qf-slia-proof-exhibition.html | 19 +- .../2022/results/qf-slia-single-query.html | 71 +- .../2022/results/qf-snia-single-query.html | 51 +- .../results/qf-strings-proof-exhibition.html | 19 +- .../2022/results/qf-strings-single-query.html | 71 +- archive/2022/results/qf-uf-incremental.html | 29 +- .../2022/results/qf-uf-model-validation.html | 43 +- 
.../2022/results/qf-uf-proof-exhibition.html | 31 +- archive/2022/results/qf-uf-single-query.html | 101 +- archive/2022/results/qf-uf-unsat-core.html | 39 +- archive/2022/results/qf-ufbv-incremental.html | 27 +- .../results/qf-ufbv-model-validation.html | 39 +- .../results/qf-ufbv-proof-exhibition.html | 19 +- .../2022/results/qf-ufbv-single-query.html | 81 +- archive/2022/results/qf-ufbv-unsat-core.html | 39 +- .../results/qf-ufdt-proof-exhibition.html | 23 +- .../2022/results/qf-ufdt-single-query.html | 61 +- archive/2022/results/qf-ufdt-unsat-core.html | 31 +- .../results/qf-ufdtlira-proof-exhibition.html | 23 +- .../results/qf-ufdtlira-single-query.html | 61 +- .../2022/results/qf-ufdtlira-unsat-core.html | 27 +- archive/2022/results/qf-uffp-incremental.html | 23 +- .../results/qf-uffp-proof-exhibition.html | 19 +- archive/2022/results/qf-uffp-unsat-core.html | 23 +- .../qf-uffpdtnira-proof-exhibition.html | 19 +- .../results/qf-ufidl-model-validation.html | 39 +- .../results/qf-ufidl-proof-exhibition.html | 27 +- .../2022/results/qf-ufidl-single-query.html | 91 +- archive/2022/results/qf-ufidl-unsat-core.html | 39 +- .../2022/results/qf-uflia-incremental.html | 29 +- .../results/qf-uflia-model-validation.html | 43 +- .../results/qf-uflia-proof-exhibition.html | 27 +- .../2022/results/qf-uflia-single-query.html | 101 +- archive/2022/results/qf-uflia-unsat-core.html | 39 +- archive/2022/results/qf-uflra-cloud.html | 43 +- .../2022/results/qf-uflra-incremental.html | 29 +- .../results/qf-uflra-model-validation.html | 43 +- archive/2022/results/qf-uflra-parallel.html | 35 +- .../results/qf-uflra-proof-exhibition.html | 27 +- .../2022/results/qf-uflra-single-query.html | 101 +- archive/2022/results/qf-uflra-unsat-core.html | 39 +- .../2022/results/qf-ufnia-incremental.html | 27 +- .../results/qf-ufnia-proof-exhibition.html | 23 +- .../2022/results/qf-ufnia-single-query.html | 81 +- archive/2022/results/qf-ufnia-unsat-core.html | 35 +- .../2022/results/qf-ufnra-incremental.html | 25 +- .../results/qf-ufnra-proof-exhibition.html | 23 +- .../2022/results/qf-ufnra-single-query.html | 91 +- archive/2022/results/qf-ufnra-unsat-core.html | 35 +- archive/2022/results/results-cloud.html | 7 +- archive/2022/results/results-incremental.html | 7 +- .../results/results-model-validation.html | 7 +- archive/2022/results/results-parallel.html | 7 +- .../2022/results/results-single-query.html | 7 +- archive/2022/results/results-unsat-core.html | 7 +- archive/2022/results/uf-cloud.html | 27 +- archive/2022/results/uf-incremental.html | 25 +- archive/2022/results/uf-parallel.html | 19 +- archive/2022/results/uf-proof-exhibition.html | 27 +- archive/2022/results/uf-single-query.html | 101 +- archive/2022/results/uf-unsat-core.html | 39 +- .../2022/results/ufbv-proof-exhibition.html | 19 +- archive/2022/results/ufbv-single-query.html | 71 +- .../2022/results/ufbvfp-proof-exhibition.html | 19 +- archive/2022/results/ufbvfp-single-query.html | 71 +- .../results/ufbvlia-proof-exhibition.html | 19 +- .../2022/results/ufbvlia-single-query.html | 61 +- archive/2022/results/ufdt-cloud.html | 27 +- archive/2022/results/ufdt-parallel.html | 19 +- .../2022/results/ufdt-proof-exhibition.html | 23 +- archive/2022/results/ufdt-single-query.html | 81 +- archive/2022/results/ufdt-unsat-core.html | 39 +- archive/2022/results/ufdtlia-cloud.html | 27 +- archive/2022/results/ufdtlia-parallel.html | 19 +- .../results/ufdtlia-proof-exhibition.html | 23 +- .../2022/results/ufdtlia-single-query.html | 81 +- 
archive/2022/results/ufdtlia-unsat-core.html | 39 +- archive/2022/results/ufdtlira-cloud.html | 27 +- archive/2022/results/ufdtlira-parallel.html | 19 +- .../results/ufdtlira-proof-exhibition.html | 23 +- .../2022/results/ufdtlira-single-query.html | 81 +- archive/2022/results/ufdtlira-unsat-core.html | 39 +- archive/2022/results/ufdtnia-incremental.html | 21 +- .../results/ufdtnia-proof-exhibition.html | 19 +- .../2022/results/ufdtnia-single-query.html | 71 +- archive/2022/results/ufdtnia-unsat-core.html | 35 +- archive/2022/results/ufdtnira-cloud.html | 27 +- archive/2022/results/ufdtnira-parallel.html | 19 +- .../results/ufdtnira-proof-exhibition.html | 19 +- .../2022/results/ufdtnira-single-query.html | 71 +- archive/2022/results/ufdtnira-unsat-core.html | 35 +- .../results/uffpdtnira-proof-exhibition.html | 19 +- .../2022/results/uffpdtnira-single-query.html | 61 +- .../2022/results/uffpdtnira-unsat-core.html | 31 +- .../2022/results/ufidl-proof-exhibition.html | 27 +- archive/2022/results/ufidl-single-query.html | 91 +- archive/2022/results/ufidl-unsat-core.html | 39 +- archive/2022/results/uflia-cloud.html | 27 +- archive/2022/results/uflia-parallel.html | 19 +- .../2022/results/uflia-proof-exhibition.html | 27 +- archive/2022/results/uflia-single-query.html | 91 +- archive/2022/results/uflia-unsat-core.html | 39 +- archive/2022/results/uflra-incremental.html | 25 +- .../2022/results/uflra-proof-exhibition.html | 27 +- archive/2022/results/uflra-single-query.html | 91 +- archive/2022/results/uflra-unsat-core.html | 39 +- archive/2022/results/ufnia-cloud.html | 27 +- archive/2022/results/ufnia-incremental.html | 25 +- archive/2022/results/ufnia-parallel.html | 19 +- .../2022/results/ufnia-proof-exhibition.html | 19 +- archive/2022/results/ufnia-single-query.html | 71 +- archive/2022/results/ufnia-unsat-core.html | 35 +- archive/2022/results/ufnra-incremental.html | 25 +- archive/2022/slides.html | 7 +- archive/2022/specs.html | 7 +- archive/2022/stats.html | 7 +- archive/2022/tools.html | 55 +- archive/2023/benchmarks.html | 7 +- archive/2023/comparisons.html | 7 +- archive/2023/divisions/arith.html | 7 +- archive/2023/divisions/bitvec.html | 7 +- .../2023/divisions/equality-lineararith.html | 7 +- .../2023/divisions/equality-machinearith.html | 7 +- .../divisions/equality-nonlineararith.html | 7 +- archive/2023/divisions/equality.html | 7 +- archive/2023/divisions/fparith.html | 7 +- archive/2023/divisions/qf-adt-bitvec.html | 7 +- archive/2023/divisions/qf-adt-linarith.html | 7 +- .../divisions/qf-array-bitvec-linarith.html | 7 +- archive/2023/divisions/qf-bitvec.html | 7 +- .../qf-datatypes-bitvec-linarith.html | 7 +- archive/2023/divisions/qf-datatypes.html | 7 +- .../divisions/qf-equality-bitvec-arith.html | 7 +- .../2023/divisions/qf-equality-bitvec.html | 7 +- .../divisions/qf-equality-lineararith.html | 7 +- .../divisions/qf-equality-nonlineararith.html | 7 +- archive/2023/divisions/qf-equality.html | 7 +- archive/2023/divisions/qf-fparith.html | 7 +- archive/2023/divisions/qf-linearintarith.html | 7 +- .../2023/divisions/qf-linearrealarith.html | 7 +- .../2023/divisions/qf-nonlinearintarith.html | 7 +- .../2023/divisions/qf-nonlinearrealarith.html | 7 +- archive/2023/divisions/qf-strings.html | 7 +- archive/2023/index.html | 7 +- archive/2023/model.html | 7 +- .../news/2023-02-07-call-for-benchmark.html | 9 +- .../news/2023-02-07-call-for-solvers.html | 7 +- .../2023-04-28-final-call-for-solvers.html | 7 +- .../news/2023-05-20-preliminary-solvers.html | 7 +- 
.../2023/news/2023-06-05-jobs-running.html | 7 +- .../news/2023-07-06-competition-results.html | 7 +- archive/2023/parallel-and-cloud-tracks.html | 7 +- archive/2023/participants.html | 7 +- .../2018-mathsat-incremental.html | 8 +- archive/2023/participants/2019-par4.html | 8 +- archive/2023/participants/2019-z3.html | 8 +- archive/2023/participants/2020-bitwuzla.html | 8 +- archive/2023/participants/2020-cvc4-uc.html | 8 +- archive/2023/participants/2020-cvc4.html | 8 +- archive/2023/participants/2020-yices2.html | 8 +- archive/2023/participants/2020-z3.html | 8 +- archive/2023/participants/2021-cvc5-inc.html | 8 +- archive/2023/participants/2021-cvc5-uc.html | 8 +- archive/2023/participants/2021-mathsat5.html | 8 +- .../participants/2021-yices2-incremental.html | 8 +- .../2021-yices2-model-validation.html | 8 +- archive/2023/participants/2021-yices2.html | 8 +- archive/2023/participants/2021-z3.html | 8 +- archive/2023/participants/2022-bitwuzla.html | 8 +- archive/2023/participants/2022-cvc5.html | 8 +- archive/2023/participants/2022-mathsat.html | 8 +- archive/2023/participants/2022-opensmt.html | 8 +- .../2023/participants/2022-smtinterpol.html | 8 +- archive/2023/participants/2022-stp-fixed.html | 8 +- .../2022-ultimateeliminator-mathsat.html | 8 +- archive/2023/participants/2022-vampire.html | 8 +- archive/2023/participants/2022-yices2.html | 8 +- archive/2023/participants/2022-z3-4-8-17.html | 8 +- archive/2023/participants/2022-z3-fixed.html | 8 +- archive/2023/participants/2022-z3.html | 8 +- archive/2023/participants/bitwuzla-fixed.html | 8 +- archive/2023/participants/bitwuzla.html | 8 +- archive/2023/participants/colibri.html | 8 +- archive/2023/participants/cvc5-lfsc.html | 8 +- archive/2023/participants/cvc5-nra-ls.html | 8 +- archive/2023/participants/cvc5.html | 8 +- archive/2023/participants/iprover-fixed.html | 8 +- archive/2023/participants/iprover.html | 8 +- archive/2023/participants/ismt.html | 8 +- archive/2023/participants/opensmt.html | 8 +- archive/2023/participants/ostrich-fixed.html | 8 +- archive/2023/participants/ostrich.html | 8 +- archive/2023/participants/q3b-pbdd.html | 8 +- archive/2023/participants/q3b.html | 8 +- archive/2023/participants/smt-rat-mcsat.html | 8 +- archive/2023/participants/smtinterpol.html | 8 +- archive/2023/participants/stp.html | 8 +- .../ultimateeliminator-mathsat.html | 8 +- ...ltimateintblastingwrapper-smtinterpol.html | 8 +- archive/2023/participants/vampire.html | 8 +- archive/2023/participants/yaga.html | 8 +- archive/2023/participants/yices-ismt.html | 8 +- archive/2023/participants/yices2-fixed.html | 8 +- archive/2023/participants/yices2.html | 8 +- archive/2023/participants/yicesqs.html | 8 +- archive/2023/participants/z3-alpha.html | 8 +- .../2023/participants/z3-noodler-fixed.html | 8 +- archive/2023/participants/z3-noodler.html | 8 +- archive/2023/participants/z3-owl-fixed.html | 8 +- archive/2023/participants/z3-owl.html | 8 +- archive/2023/participants/z3.html | 8 +- archive/2023/proof-track.html | 7 +- archive/2023/results.html | 241 ++-- .../2023/results/abv-proof-exhibition.html | 19 +- archive/2023/results/abv-single-query.html | 81 +- archive/2023/results/abv-unsat-core.html | 35 +- .../2023/results/abvfp-proof-exhibition.html | 19 +- archive/2023/results/abvfp-single-query.html | 71 +- archive/2023/results/abvfp-unsat-core.html | 35 +- .../2023/results/abvfplra-incremental.html | 23 +- .../results/abvfplra-proof-exhibition.html | 19 +- .../2023/results/abvfplra-single-query.html | 71 +- 
archive/2023/results/abvfplra-unsat-core.html | 35 +- archive/2023/results/alia-incremental.html | 23 +- .../2023/results/alia-proof-exhibition.html | 23 +- archive/2023/results/alia-single-query.html | 91 +- archive/2023/results/alia-unsat-core.html | 35 +- archive/2023/results/ania-incremental.html | 21 +- .../2023/results/ania-proof-exhibition.html | 19 +- archive/2023/results/ania-single-query.html | 71 +- archive/2023/results/ania-unsat-core.html | 27 +- archive/2023/results/arith-cloud.html | 27 +- archive/2023/results/arith-incremental.html | 23 +- archive/2023/results/arith-parallel.html | 27 +- .../2023/results/arith-proof-exhibition.html | 23 +- archive/2023/results/arith-single-query.html | 101 +- archive/2023/results/arith-unsat-core.html | 35 +- .../2023/results/aufbv-proof-exhibition.html | 19 +- archive/2023/results/aufbv-single-query.html | 81 +- .../results/aufbvdtlia-proof-exhibition.html | 19 +- .../2023/results/aufbvdtlia-single-query.html | 51 +- .../results/aufbvdtnira-proof-exhibition.html | 19 +- .../results/aufbvfp-proof-exhibition.html | 19 +- .../2023/results/aufbvfp-single-query.html | 71 +- .../results/aufdtlia-proof-exhibition.html | 23 +- .../2023/results/aufdtlia-single-query.html | 81 +- archive/2023/results/aufdtlia-unsat-core.html | 31 +- archive/2023/results/aufdtlira-cloud.html | 27 +- archive/2023/results/aufdtlira-parallel.html | 27 +- .../results/aufdtlira-proof-exhibition.html | 23 +- .../2023/results/aufdtlira-single-query.html | 81 +- .../2023/results/aufdtlira-unsat-core.html | 31 +- archive/2023/results/aufdtnira-cloud.html | 27 +- archive/2023/results/aufdtnira-parallel.html | 27 +- .../results/aufdtnira-proof-exhibition.html | 19 +- .../2023/results/aufdtnira-single-query.html | 71 +- .../2023/results/aufdtnira-unsat-core.html | 27 +- .../results/auffpdtnira-proof-exhibition.html | 19 +- archive/2023/results/auflia-cloud.html | 27 +- archive/2023/results/auflia-parallel.html | 27 +- .../2023/results/auflia-proof-exhibition.html | 23 +- archive/2023/results/auflia-single-query.html | 91 +- archive/2023/results/auflia-unsat-core.html | 35 +- archive/2023/results/auflira-cloud.html | 27 +- archive/2023/results/auflira-parallel.html | 27 +- .../results/auflira-proof-exhibition.html | 23 +- .../2023/results/auflira-single-query.html | 91 +- archive/2023/results/auflira-unsat-core.html | 35 +- .../2023/results/aufnia-proof-exhibition.html | 19 +- archive/2023/results/aufnia-single-query.html | 81 +- archive/2023/results/aufnia-unsat-core.html | 31 +- archive/2023/results/aufnira-cloud.html | 27 +- archive/2023/results/aufnira-incremental.html | 23 +- archive/2023/results/aufnira-parallel.html | 27 +- .../results/aufnira-proof-exhibition.html | 19 +- .../2023/results/aufnira-single-query.html | 81 +- archive/2023/results/aufnira-unsat-core.html | 31 +- archive/2023/results/biggest-lead-cloud.html | 15 +- .../results/biggest-lead-incremental.html | 69 +- .../biggest-lead-model-validation.html | 95 +- .../2023/results/biggest-lead-parallel.html | 15 +- .../results/biggest-lead-single-query.html | 257 ++-- .../2023/results/biggest-lead-unsat-core.html | 107 +- archive/2023/results/bitvec-incremental.html | 23 +- .../2023/results/bitvec-proof-exhibition.html | 19 +- archive/2023/results/bitvec-single-query.html | 101 +- archive/2023/results/bitvec-unsat-core.html | 35 +- archive/2023/results/bv-incremental.html | 23 +- archive/2023/results/bv-proof-exhibition.html | 19 +- archive/2023/results/bv-single-query.html | 101 +- 
archive/2023/results/bv-unsat-core.html | 35 +- archive/2023/results/bvfp-incremental.html | 23 +- .../2023/results/bvfp-proof-exhibition.html | 19 +- archive/2023/results/bvfp-single-query.html | 71 +- archive/2023/results/bvfp-unsat-core.html | 35 +- archive/2023/results/bvfplra-incremental.html | 23 +- .../results/bvfplra-proof-exhibition.html | 19 +- .../2023/results/bvfplra-single-query.html | 71 +- archive/2023/results/bvfplra-unsat-core.html | 35 +- archive/2023/results/equality-cloud.html | 27 +- .../2023/results/equality-incremental.html | 27 +- .../results/equality-lineararith-cloud.html | 27 +- .../equality-lineararith-incremental.html | 23 +- .../equality-lineararith-parallel.html | 27 +- ...equality-lineararith-proof-exhibition.html | 23 +- .../equality-lineararith-single-query.html | 91 +- .../equality-lineararith-unsat-core.html | 35 +- .../equality-machinearith-incremental.html | 23 +- ...quality-machinearith-proof-exhibition.html | 19 +- .../equality-machinearith-single-query.html | 81 +- .../equality-machinearith-unsat-core.html | 35 +- .../equality-nonlineararith-cloud.html | 27 +- .../equality-nonlineararith-incremental.html | 23 +- .../equality-nonlineararith-parallel.html | 27 +- ...ality-nonlineararith-proof-exhibition.html | 19 +- .../equality-nonlineararith-single-query.html | 81 +- .../equality-nonlineararith-unsat-core.html | 31 +- archive/2023/results/equality-parallel.html | 27 +- .../results/equality-proof-exhibition.html | 23 +- .../2023/results/equality-single-query.html | 101 +- archive/2023/results/equality-unsat-core.html | 43 +- archive/2023/results/fp-proof-exhibition.html | 19 +- archive/2023/results/fp-single-query.html | 71 +- archive/2023/results/fparith-incremental.html | 23 +- .../results/fparith-proof-exhibition.html | 19 +- .../2023/results/fparith-single-query.html | 71 +- archive/2023/results/fparith-unsat-core.html | 35 +- archive/2023/results/fplra-single-query.html | 71 +- .../largest-contribution-incremental.html | 65 +- ...largest-contribution-model-validation.html | 83 +- .../largest-contribution-single-query.html | 237 ++-- .../largest-contribution-unsat-core.html | 83 +- archive/2023/results/lia-incremental.html | 23 +- .../2023/results/lia-proof-exhibition.html | 23 +- archive/2023/results/lia-single-query.html | 101 +- archive/2023/results/lia-unsat-core.html | 35 +- archive/2023/results/lra-cloud.html | 27 +- archive/2023/results/lra-incremental.html | 23 +- archive/2023/results/lra-parallel.html | 27 +- .../2023/results/lra-proof-exhibition.html | 23 +- archive/2023/results/lra-single-query.html | 101 +- .../2023/results/nia-proof-exhibition.html | 19 +- archive/2023/results/nia-single-query.html | 91 +- archive/2023/results/nia-unsat-core.html | 31 +- archive/2023/results/nra-cloud.html | 27 +- archive/2023/results/nra-parallel.html | 27 +- .../2023/results/nra-proof-exhibition.html | 19 +- archive/2023/results/nra-single-query.html | 91 +- archive/2023/results/qf-abv-incremental.html | 25 +- .../2023/results/qf-abv-model-validation.html | 31 +- .../2023/results/qf-abv-proof-exhibition.html | 19 +- archive/2023/results/qf-abv-single-query.html | 101 +- archive/2023/results/qf-abv-unsat-core.html | 35 +- .../2023/results/qf-abvfp-incremental.html | 21 +- .../results/qf-abvfp-model-validation.html | 27 +- .../results/qf-abvfp-proof-exhibition.html | 19 +- .../2023/results/qf-abvfp-single-query.html | 71 +- archive/2023/results/qf-abvfp-unsat-core.html | 31 +- .../2023/results/qf-abvfplra-incremental.html | 21 +- 
.../results/qf-abvfplra-model-validation.html | 27 +- .../results/qf-abvfplra-proof-exhibition.html | 19 +- .../results/qf-abvfplra-single-query.html | 71 +- .../2023/results/qf-abvfplra-unsat-core.html | 31 +- .../qf-adt-bitvec-model-validation.html | 27 +- .../qf-adt-linarith-model-validation.html | 23 +- archive/2023/results/qf-alia-incremental.html | 27 +- .../results/qf-alia-model-validation.html | 27 +- .../results/qf-alia-proof-exhibition.html | 23 +- .../2023/results/qf-alia-single-query.html | 71 +- archive/2023/results/qf-alia-unsat-core.html | 31 +- archive/2023/results/qf-ania-incremental.html | 25 +- .../results/qf-ania-model-validation.html | 23 +- .../results/qf-ania-proof-exhibition.html | 23 +- .../2023/results/qf-ania-single-query.html | 61 +- archive/2023/results/qf-ania-unsat-core.html | 27 +- ...rray-bitvec-linarith-model-validation.html | 31 +- .../2023/results/qf-aufbv-incremental.html | 25 +- .../results/qf-aufbv-model-validation.html | 31 +- .../results/qf-aufbv-proof-exhibition.html | 19 +- .../2023/results/qf-aufbv-single-query.html | 101 +- archive/2023/results/qf-aufbv-unsat-core.html | 35 +- .../results/qf-aufbvfp-model-validation.html | 27 +- .../2023/results/qf-aufbvfp-single-query.html | 71 +- .../2023/results/qf-aufbvlia-incremental.html | 21 +- .../2023/results/qf-aufbvnia-incremental.html | 21 +- .../2023/results/qf-auflia-incremental.html | 27 +- .../results/qf-auflia-model-validation.html | 27 +- .../results/qf-auflia-proof-exhibition.html | 23 +- .../2023/results/qf-auflia-single-query.html | 71 +- .../2023/results/qf-auflia-unsat-core.html | 31 +- .../results/qf-aufnia-model-validation.html | 23 +- .../results/qf-aufnia-proof-exhibition.html | 23 +- .../2023/results/qf-aufnia-single-query.html | 61 +- .../2023/results/qf-aufnia-unsat-core.html | 27 +- .../2023/results/qf-ax-model-validation.html | 27 +- .../2023/results/qf-ax-proof-exhibition.html | 23 +- archive/2023/results/qf-ax-single-query.html | 71 +- archive/2023/results/qf-ax-unsat-core.html | 31 +- .../2023/results/qf-bitvec-incremental.html | 27 +- .../results/qf-bitvec-model-validation.html | 39 +- archive/2023/results/qf-bitvec-parallel.html | 19 +- .../results/qf-bitvec-proof-exhibition.html | 19 +- .../2023/results/qf-bitvec-single-query.html | 111 +- .../2023/results/qf-bitvec-unsat-core.html | 35 +- archive/2023/results/qf-bv-incremental.html | 27 +- .../2023/results/qf-bv-model-validation.html | 39 +- archive/2023/results/qf-bv-parallel.html | 19 +- .../2023/results/qf-bv-proof-exhibition.html | 19 +- archive/2023/results/qf-bv-single-query.html | 111 +- archive/2023/results/qf-bv-unsat-core.html | 35 +- archive/2023/results/qf-bvfp-incremental.html | 21 +- .../results/qf-bvfp-model-validation.html | 31 +- .../results/qf-bvfp-proof-exhibition.html | 19 +- .../2023/results/qf-bvfp-single-query.html | 91 +- archive/2023/results/qf-bvfp-unsat-core.html | 31 +- .../2023/results/qf-bvfplra-incremental.html | 21 +- .../results/qf-bvfplra-model-validation.html | 31 +- .../results/qf-bvfplra-proof-exhibition.html | 19 +- .../2023/results/qf-bvfplra-single-query.html | 71 +- .../2023/results/qf-bvfplra-unsat-core.html | 31 +- .../qf-datatypes-model-validation.html | 19 +- .../qf-datatypes-proof-exhibition.html | 23 +- .../results/qf-datatypes-single-query.html | 51 +- .../2023/results/qf-datatypes-unsat-core.html | 27 +- .../2023/results/qf-dt-model-validation.html | 23 +- .../2023/results/qf-dt-proof-exhibition.html | 23 +- archive/2023/results/qf-dt-single-query.html | 51 +- 
archive/2023/results/qf-dt-unsat-core.html | 27 +- .../qf-equality-bitvec-arith-incremental.html | 21 +- .../qf-equality-bitvec-incremental.html | 25 +- .../qf-equality-bitvec-model-validation.html | 35 +- .../qf-equality-bitvec-proof-exhibition.html | 19 +- .../qf-equality-bitvec-single-query.html | 101 +- .../qf-equality-bitvec-unsat-core.html | 35 +- .../2023/results/qf-equality-incremental.html | 27 +- .../qf-equality-lineararith-incremental.html | 27 +- ...equality-lineararith-model-validation.html | 35 +- ...equality-lineararith-proof-exhibition.html | 23 +- .../qf-equality-lineararith-single-query.html | 71 +- .../qf-equality-lineararith-unsat-core.html | 31 +- .../results/qf-equality-model-validation.html | 39 +- ...f-equality-nonlineararith-incremental.html | 25 +- ...ality-nonlineararith-model-validation.html | 19 +- ...ality-nonlineararith-proof-exhibition.html | 23 +- ...-equality-nonlineararith-single-query.html | 61 +- ...qf-equality-nonlineararith-unsat-core.html | 27 +- .../results/qf-equality-proof-exhibition.html | 23 +- .../results/qf-equality-single-query.html | 71 +- .../2023/results/qf-equality-unsat-core.html | 31 +- archive/2023/results/qf-fp-incremental.html | 21 +- .../2023/results/qf-fp-model-validation.html | 31 +- .../2023/results/qf-fp-proof-exhibition.html | 19 +- archive/2023/results/qf-fp-single-query.html | 91 +- archive/2023/results/qf-fp-unsat-core.html | 31 +- .../2023/results/qf-fparith-incremental.html | 21 +- .../results/qf-fparith-model-validation.html | 27 +- .../results/qf-fparith-proof-exhibition.html | 19 +- .../2023/results/qf-fparith-single-query.html | 91 +- .../2023/results/qf-fparith-unsat-core.html | 31 +- .../results/qf-fplra-model-validation.html | 31 +- .../results/qf-fplra-proof-exhibition.html | 19 +- .../2023/results/qf-fplra-single-query.html | 71 +- .../2023/results/qf-idl-model-validation.html | 39 +- .../2023/results/qf-idl-proof-exhibition.html | 23 +- archive/2023/results/qf-idl-single-query.html | 81 +- archive/2023/results/qf-idl-unsat-core.html | 31 +- archive/2023/results/qf-lia-incremental.html | 27 +- .../2023/results/qf-lia-model-validation.html | 39 +- .../2023/results/qf-lia-proof-exhibition.html | 23 +- archive/2023/results/qf-lia-single-query.html | 81 +- archive/2023/results/qf-lia-unsat-core.html | 31 +- .../qf-linearintarith-incremental.html | 27 +- .../qf-linearintarith-model-validation.html | 39 +- .../qf-linearintarith-proof-exhibition.html | 23 +- .../qf-linearintarith-single-query.html | 81 +- .../results/qf-linearintarith-unsat-core.html | 31 +- .../qf-linearrealarith-incremental.html | 27 +- .../qf-linearrealarith-model-validation.html | 39 +- .../qf-linearrealarith-proof-exhibition.html | 23 +- .../qf-linearrealarith-single-query.html | 91 +- .../qf-linearrealarith-unsat-core.html | 31 +- .../results/qf-lira-model-validation.html | 27 +- .../results/qf-lira-proof-exhibition.html | 23 +- .../2023/results/qf-lira-single-query.html | 61 +- archive/2023/results/qf-lira-unsat-core.html | 31 +- archive/2023/results/qf-lra-incremental.html | 27 +- .../2023/results/qf-lra-model-validation.html | 39 +- .../2023/results/qf-lra-proof-exhibition.html | 23 +- archive/2023/results/qf-lra-single-query.html | 91 +- archive/2023/results/qf-lra-unsat-core.html | 31 +- archive/2023/results/qf-nia-incremental.html | 25 +- .../2023/results/qf-nia-model-validation.html | 31 +- .../2023/results/qf-nia-proof-exhibition.html | 19 +- archive/2023/results/qf-nia-single-query.html | 81 +- .../results/qf-nira-proof-exhibition.html | 
19 +- .../2023/results/qf-nira-single-query.html | 41 +- .../qf-nonlinearintarith-incremental.html | 25 +- ...qf-nonlinearintarith-model-validation.html | 27 +- ...qf-nonlinearintarith-proof-exhibition.html | 19 +- .../qf-nonlinearintarith-single-query.html | 81 +- ...f-nonlinearrealarith-model-validation.html | 31 +- ...f-nonlinearrealarith-proof-exhibition.html | 19 +- .../qf-nonlinearrealarith-single-query.html | 101 +- .../2023/results/qf-nra-model-validation.html | 35 +- .../2023/results/qf-nra-proof-exhibition.html | 19 +- archive/2023/results/qf-nra-single-query.html | 101 +- .../2023/results/qf-rdl-model-validation.html | 35 +- .../2023/results/qf-rdl-proof-exhibition.html | 23 +- archive/2023/results/qf-rdl-single-query.html | 81 +- .../2023/results/qf-s-proof-exhibition.html | 19 +- archive/2023/results/qf-s-single-query.html | 91 +- .../results/qf-slia-proof-exhibition.html | 19 +- .../2023/results/qf-slia-single-query.html | 91 +- .../2023/results/qf-snia-single-query.html | 71 +- .../results/qf-strings-proof-exhibition.html | 19 +- .../2023/results/qf-strings-single-query.html | 91 +- archive/2023/results/qf-uf-incremental.html | 27 +- .../2023/results/qf-uf-model-validation.html | 39 +- .../2023/results/qf-uf-proof-exhibition.html | 23 +- archive/2023/results/qf-uf-single-query.html | 71 +- archive/2023/results/qf-uf-unsat-core.html | 31 +- archive/2023/results/qf-ufbv-incremental.html | 25 +- .../results/qf-ufbv-model-validation.html | 35 +- .../results/qf-ufbv-proof-exhibition.html | 19 +- .../2023/results/qf-ufbv-single-query.html | 101 +- archive/2023/results/qf-ufbv-unsat-core.html | 35 +- .../results/qf-ufbvdt-proof-exhibition.html | 19 +- .../2023/results/qf-ufbvdt-single-query.html | 41 +- .../2023/results/qf-ufbvlia-incremental.html | 21 +- .../results/qf-ufdt-proof-exhibition.html | 23 +- .../2023/results/qf-ufdt-single-query.html | 51 +- archive/2023/results/qf-ufdt-unsat-core.html | 27 +- .../results/qf-ufdtlia-proof-exhibition.html | 23 +- .../2023/results/qf-ufdtlia-single-query.html | 41 +- .../2023/results/qf-ufdtlia-unsat-core.html | 23 +- .../results/qf-ufdtlira-proof-exhibition.html | 23 +- .../results/qf-ufdtlira-single-query.html | 51 +- .../2023/results/qf-ufdtlira-unsat-core.html | 23 +- archive/2023/results/qf-uffp-incremental.html | 21 +- .../results/qf-uffp-proof-exhibition.html | 19 +- .../qf-uffpdtnira-proof-exhibition.html | 19 +- .../results/qf-ufidl-model-validation.html | 35 +- .../results/qf-ufidl-proof-exhibition.html | 23 +- .../2023/results/qf-ufidl-single-query.html | 71 +- archive/2023/results/qf-ufidl-unsat-core.html | 31 +- .../2023/results/qf-uflia-incremental.html | 27 +- .../results/qf-uflia-model-validation.html | 35 +- .../results/qf-uflia-proof-exhibition.html | 23 +- .../2023/results/qf-uflia-single-query.html | 71 +- archive/2023/results/qf-uflia-unsat-core.html | 31 +- .../2023/results/qf-uflra-incremental.html | 27 +- .../results/qf-uflra-model-validation.html | 35 +- .../results/qf-uflra-proof-exhibition.html | 23 +- .../2023/results/qf-uflra-single-query.html | 71 +- archive/2023/results/qf-uflra-unsat-core.html | 31 +- .../2023/results/qf-ufnia-incremental.html | 25 +- .../results/qf-ufnia-model-validation.html | 23 +- .../results/qf-ufnia-proof-exhibition.html | 23 +- .../2023/results/qf-ufnia-single-query.html | 61 +- archive/2023/results/qf-ufnia-unsat-core.html | 27 +- .../2023/results/qf-ufnra-incremental.html | 25 +- .../results/qf-ufnra-model-validation.html | 23 +- .../results/qf-ufnra-proof-exhibition.html | 23 +- 
.../2023/results/qf-ufnra-single-query.html | 61 +- archive/2023/results/qf-ufnra-unsat-core.html | 27 +- archive/2023/results/results-cloud.html | 7 +- archive/2023/results/results-incremental.html | 7 +- .../results/results-model-validation.html | 7 +- archive/2023/results/results-parallel.html | 7 +- .../2023/results/results-single-query.html | 7 +- archive/2023/results/results-unsat-core.html | 7 +- archive/2023/results/uf-cloud.html | 27 +- archive/2023/results/uf-incremental.html | 27 +- archive/2023/results/uf-parallel.html | 27 +- archive/2023/results/uf-proof-exhibition.html | 23 +- archive/2023/results/uf-single-query.html | 101 +- archive/2023/results/uf-unsat-core.html | 43 +- .../2023/results/ufbv-proof-exhibition.html | 19 +- archive/2023/results/ufbv-single-query.html | 81 +- archive/2023/results/ufbvdt-single-query.html | 41 +- .../2023/results/ufbvfp-proof-exhibition.html | 19 +- archive/2023/results/ufbvfp-single-query.html | 71 +- .../results/ufbvlia-proof-exhibition.html | 19 +- .../2023/results/ufbvlia-single-query.html | 61 +- archive/2023/results/ufdt-cloud.html | 27 +- archive/2023/results/ufdt-parallel.html | 27 +- .../2023/results/ufdt-proof-exhibition.html | 23 +- archive/2023/results/ufdt-single-query.html | 81 +- archive/2023/results/ufdt-unsat-core.html | 35 +- archive/2023/results/ufdtlia-cloud.html | 27 +- archive/2023/results/ufdtlia-parallel.html | 27 +- .../results/ufdtlia-proof-exhibition.html | 23 +- .../2023/results/ufdtlia-single-query.html | 81 +- archive/2023/results/ufdtlia-unsat-core.html | 31 +- archive/2023/results/ufdtlira-cloud.html | 27 +- archive/2023/results/ufdtlira-parallel.html | 27 +- .../results/ufdtlira-proof-exhibition.html | 23 +- .../2023/results/ufdtlira-single-query.html | 81 +- archive/2023/results/ufdtlira-unsat-core.html | 31 +- archive/2023/results/ufdtnia-incremental.html | 19 +- .../results/ufdtnia-proof-exhibition.html | 19 +- .../2023/results/ufdtnia-single-query.html | 71 +- archive/2023/results/ufdtnia-unsat-core.html | 27 +- archive/2023/results/ufdtnira-cloud.html | 27 +- archive/2023/results/ufdtnira-parallel.html | 27 +- .../results/ufdtnira-proof-exhibition.html | 19 +- .../2023/results/ufdtnira-single-query.html | 71 +- archive/2023/results/ufdtnira-unsat-core.html | 27 +- .../results/uffpdtnira-proof-exhibition.html | 19 +- archive/2023/results/ufidl-parallel.html | 27 +- .../2023/results/ufidl-proof-exhibition.html | 23 +- archive/2023/results/ufidl-single-query.html | 91 +- archive/2023/results/ufidl-unsat-core.html | 35 +- archive/2023/results/uflia-cloud.html | 27 +- archive/2023/results/uflia-parallel.html | 27 +- .../2023/results/uflia-proof-exhibition.html | 23 +- archive/2023/results/uflia-single-query.html | 91 +- archive/2023/results/uflia-unsat-core.html | 35 +- archive/2023/results/uflra-incremental.html | 23 +- .../2023/results/uflra-proof-exhibition.html | 23 +- archive/2023/results/uflra-single-query.html | 91 +- archive/2023/results/uflra-unsat-core.html | 35 +- archive/2023/results/ufnia-cloud.html | 27 +- archive/2023/results/ufnia-incremental.html | 23 +- archive/2023/results/ufnia-parallel.html | 27 +- .../2023/results/ufnia-proof-exhibition.html | 19 +- archive/2023/results/ufnia-single-query.html | 81 +- archive/2023/results/ufnia-unsat-core.html | 31 +- archive/2023/results/ufnra-incremental.html | 23 +- archive/2023/slides.html | 7 +- archive/2023/specs.html | 7 +- archive/2023/stats.html | 7 +- archive/assets/js/scale.fix.js | 2 +- archive/js/jquery.tablesorter.js | 106 +- codecov.yaml | 
9 + poetry.lock | 1114 +++++++++++++++++ poetry.toml | 2 + pyproject.toml | 123 ++ smtcomp/__init__.py | 0 smtcomp/archive.py | 103 ++ smtcomp/benchexec.py | 98 ++ smtcomp/benchmarks.py | 212 ++++ smtcomp/convert_csv.py | 177 +++ smtcomp/defs.py | 1095 ++++++++++++++++ smtcomp/generate_benchmarks.py | 27 + smtcomp/main.py | 121 ++ smtcomp/submission.py | 25 + smtcomp/tool.py | 92 ++ smtcomp/unpack.py | 32 + submissions/Readme.md | 20 + submissions/template/generate_Readme.py | 45 + submissions/template/template.json | 30 + submissions/template/template.md | 18 + tests/solvers_divisions_final.csv | 26 + tests/test1.json | 23 + tests/test_bad.json | 1 + tests/test_validate.py | 42 + tox.ini | 18 + web/content/_index.md | 7 +- web/content/benchmark_submission/index.md | 11 +- web/content/introduction/index.md | 7 +- web/content/model/index.md | 53 +- web/content/news/2023-11-12-test.md | 9 +- ...-design-and-results-of-the-1st-SMT-COMP.md | 1 + ...-design-and-results-of-the-2nd-SMT-COMP.md | 1 + ...-design-and-results-of-the-3rd-SMT-COMP.md | 1 + ...-design-and-results-of-the-4th-SMT-COMP.md | 1 + .../publications/2010-6-years-of-smt-comp.md | 1 + .../publications/2012-the-2012-competition.md | 1 + .../publications/2013-the-2013-evaluation.md | 1 + .../2014-the-2014-smt-competition.md | 1 + .../2018-the-smt-competition-2015-2018.md | 1 + web/content/solver_submission/index.md | 13 + web/content/solver_submission/schema.html | 7 + web/hugo.toml | 7 +- web/redirects/benchmark_submission.html | 2 +- web/redirects/index.html | 2 +- web/redirects/introduction.html | 2 +- web/redirects/papers.html | 2 +- web/redirects/previous.html | 2 +- web/redirects/publications.html | 2 +- ...scss_2bceb0f0a412527b058634b2e208c849.json | 8 +- web/themes/smtcomp/assets/css/main.css | 542 ++++---- web/themes/smtcomp/assets/js/main.js | 2 +- .../smtcomp/layouts/_default/result.html | 4 +- web/themes/smtcomp/layouts/partials/menu.html | 3 +- web/themes/smtcomp/theme.toml | 2 +- 2653 files changed, 44016 insertions(+), 42643 deletions(-) create mode 100644 .devcontainer/devcontainer.json create mode 100755 .devcontainer/postCreateCommand.sh create mode 100644 .editorconfig create mode 100644 .github/actions/setup-poetry-env/action.yml create mode 100644 .github/workflows/main.yml create mode 100644 .github/workflows/validate-codecov-config.yml create mode 100644 .pre-commit-config.yaml create mode 100644 .prettierignore create mode 100644 CONTRIBUTING.md create mode 100644 Dockerfile create mode 100644 LICENSE create mode 100644 Makefile create mode 100644 README.md create mode 100644 codecov.yaml create mode 100644 poetry.lock create mode 100644 poetry.toml create mode 100644 pyproject.toml create mode 100644 smtcomp/__init__.py create mode 100644 smtcomp/archive.py create mode 100644 smtcomp/benchexec.py create mode 100644 smtcomp/benchmarks.py create mode 100644 smtcomp/convert_csv.py create mode 100644 smtcomp/defs.py create mode 100644 smtcomp/generate_benchmarks.py create mode 100644 smtcomp/main.py create mode 100644 smtcomp/submission.py create mode 100644 smtcomp/tool.py create mode 100644 smtcomp/unpack.py create mode 100644 submissions/Readme.md create mode 100644 submissions/template/generate_Readme.py create mode 100644 submissions/template/template.json create mode 100644 submissions/template/template.md create mode 100644 tests/solvers_divisions_final.csv create mode 100644 tests/test1.json create mode 100644 tests/test_bad.json create mode 100644 tests/test_validate.py create mode 100644 tox.ini create 
mode 100644 web/content/solver_submission/index.md create mode 100644 web/content/solver_submission/schema.html diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 00000000..1bfffcf3 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,27 @@ +// For format details, see https://aka.ms/devcontainer.json. For config options, see the +// README at: https://github.com/devcontainers/templates/tree/main/src/python +{ + "name": "smtcomp", + // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile + "image": "mcr.microsoft.com/devcontainers/python:1-3.11-bullseye", + "features": { + "ghcr.io/devcontainers-contrib/features/poetry:2": {} + }, + + // Use 'postCreateCommand' to run commands after the container is created. + "postCreateCommand": "./.devcontainer/postCreateCommand.sh", + + // Configure tool-specific properties. + "customizations": { + "vscode": { + "extensions": ["ms-python.python", "editorconfig.editorconfig"], + "settings": { + "python.testing.pytestArgs": ["tests"], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + "python.defaultInterpreterPath": "/workspaces/smtcomp/.venv/bin/python", + "python.testing.pytestPath": "/workspaces/smtcomp/.venv/bin/pytest" + } + } + } +} diff --git a/.devcontainer/postCreateCommand.sh b/.devcontainer/postCreateCommand.sh new file mode 100755 index 00000000..38dca893 --- /dev/null +++ b/.devcontainer/postCreateCommand.sh @@ -0,0 +1,7 @@ +#! /usr/bin/env bash + +# Install Dependencies +poetry install --with dev + +# Install pre-commit hooks +poetry run pre-commit install --install-hooks diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..9395b543 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,5 @@ +max_line_length = 120 + +[*.json] +indent_style = space +indent_size = 4 diff --git a/.github/actions/setup-poetry-env/action.yml b/.github/actions/setup-poetry-env/action.yml new file mode 100644 index 00000000..22e40534 --- /dev/null +++ b/.github/actions/setup-poetry-env/action.yml @@ -0,0 +1,33 @@ +name: "setup-poetry-env" +description: "Composite action to setup the Python and poetry environment." 
+ +inputs: + python-version: + required: false + description: "The python version to use" + default: "3.11" + +runs: + using: "composite" + steps: + - name: Set up python + uses: actions/setup-python@v4 + with: + python-version: ${{ inputs.python-version }} + + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + virtualenvs-in-project: true + + - name: Load cached venv + id: cached-poetry-dependencies + uses: actions/cache@v3 + with: + path: .venv + key: venv-${{ runner.os }}-${{ inputs.python-version }}-${{ hashFiles('poetry.lock') }} + + - name: Install dependencies + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: poetry install --no-interaction + shell: bash diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 00000000..377744f4 --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,75 @@ +name: Main + +on: + push: + branches: + - main + pull_request: + types: [opened, synchronize, reopened] + +jobs: + quality: + runs-on: ubuntu-latest + steps: + - name: Check out + uses: actions/checkout@v3 + + - uses: actions/cache@v3 + with: + path: ~/.cache/pre-commit + key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} + + - name: Set up the environment + uses: ./.github/actions/setup-poetry-env + + - name: Compute changed files + id: changed-files + uses: tj-actions/changed-files@v40 + with: + files: | + submissions/*.json + + - name: Show changed files + run: | + for file in ${{ steps.changed-files.outputs.all_changed_files }}; do + poetry run smtcomp show $file + done + + - name: Run checks + run: make check + + tox: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.11"] + fail-fast: false + steps: + - name: Check out + uses: actions/checkout@v3 + + - name: Set up python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Poetry + uses: snok/install-poetry@v1 + + - name: Load cached venv + uses: actions/cache@v3 + with: + path: .tox + key: venv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('poetry.lock') }} + + - name: Install tox + run: | + python -m pip install --upgrade pip + python -m pip install tox tox-gh-actions + + - name: Test with tox + run: tox + + - name: Upload coverage reports to Codecov with GitHub Action on Python 3.11 + uses: codecov/codecov-action@v3 + if: ${{ matrix.python-version == '3.11' }} diff --git a/.github/workflows/validate-codecov-config.yml b/.github/workflows/validate-codecov-config.yml new file mode 100644 index 00000000..d0631610 --- /dev/null +++ b/.github/workflows/validate-codecov-config.yml @@ -0,0 +1,15 @@ +name: validate-codecov-config + +on: + pull_request: + paths: [codecov.yaml] + push: + branches: [main] + +jobs: + validate-codecov-config: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v3 + - name: Validate codecov configuration + run: curl -sSL --fail-with-body --data-binary @codecov.yaml https://codecov.io/validate diff --git a/.gitignore b/.gitignore index f4c73784..922131d7 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,172 @@ -Makefile -_site -Gemfile -Gemfile.lock -howto-serve -*.swp -bin +docs/source + +# From https://raw.githubusercontent.com/github/gitignore/main/Python.gitignore + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ 
+*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# Vscode config files +.vscode/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +/submission-schema.json +/submission-schema.html +schema_doc.css +schema_doc.min.js diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..f3355929 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,27 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: "v4.4.0" + hooks: + - id: check-case-conflict + - id: check-merge-conflict + - id: check-toml + - id: check-yaml + - id: end-of-file-fixer + - id: trailing-whitespace + + # - repo: https://github.com/astral-sh/ruff-pre-commit + # # Ruff version. 
+ # rev: v0.2.1 + # hooks: + # # Run the linter. + # - id: ruff + + - repo: https://github.com/psf/black + rev: "24.1.1" + hooks: + - id: black + + - repo: https://github.com/pre-commit/mirrors-prettier + rev: "v3.0.3" + hooks: + - id: prettier diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 00000000..21ba867d --- /dev/null +++ b/.prettierignore @@ -0,0 +1,3 @@ +archive +_site +web/themes/smtcomp/layouts/ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..4b842ccf --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,133 @@ +# Contributing to `smtcomp` + +Contributions are welcome, and they are greatly appreciated! +Every little bit helps, and credit will always be given. + +You can contribute in many ways: + +# Types of Contributions + +## Report Bugs + +Report bugs at https://github.com/smtcomp/smtcomp.github.io/issues + +If you are reporting a bug, please include: + +- Your operating system name and version. +- Any details about your local setup that might be helpful in troubleshooting. +- Detailed steps to reproduce the bug. + +## Fix Bugs + +Look through the GitHub issues for bugs. +Anything tagged with "bug" and "help wanted" is open to whoever wants to implement a fix for it. + +## Implement Features + +Look through the GitHub issues for features. +Anything tagged with "enhancement" and "help wanted" is open to whoever wants to implement it. + +## Write Documentation + +Cookiecutter PyPackage could always use more documentation, whether as part of the official docs, in docstrings, or even on the web in blog posts, articles, and such. + +## Submit Feedback + +The best way to send feedback is to file an issue at https://github.com/smtcomp/smtcomp.github.io/issues. + +If you are proposing a new feature: + +- Explain in detail how it would work. +- Keep the scope as narrow as possible, to make it easier to implement. +- Remember that this is a volunteer-driven project, and that contributions + are welcome :) + +# Get Started! + +Ready to contribute? Here's how to set up `smtcomp` for local development. +Please note this documentation assumes you already have `poetry` and `Git` installed and ready to go. + +1. Fork the `smtcomp` repo on GitHub. + +2. Clone your fork locally: + +```bash +cd +git clone git@github.com:YOUR_NAME/smtcomp.git +``` + +3. Now we need to install the environment. Navigate into the directory + +```bash +cd smtcomp +``` + +If you are using `pyenv`, select a version to use locally. (See installed versions with `pyenv versions`) + +```bash +pyenv local +``` + +Then, install and activate the environment with: + +```bash +poetry install +poetry shell +``` + +4. Install pre-commit to run linters/formatters at commit time: + +```bash +poetry run pre-commit install +``` + +5. Create a branch for local development: + +```bash +git checkout -b name-of-your-bugfix-or-feature +``` + +Now you can make your changes locally. + +6. Don't forget to add test cases for your added functionality to the `tests` directory. + +7. When you're done making changes, check that your changes pass the formatting tests. + +```bash +make check +``` + +Now, validate that all unit tests are passing: + +```bash +make test +``` + +9. Before raising a pull request you should also run tox. + This will run the tests across different versions of Python: + +```bash +tox +``` + +This requires you to have multiple versions of python installed. +This step is also triggered in the CI/CD pipeline, so you could also choose to skip this step locally. + +10. 
Commit your changes and push your branch to GitHub: + +```bash +git add . +git commit -m "Your detailed description of your changes." +git push origin name-of-your-bugfix-or-feature +``` + +11. Submit a pull request through the GitHub website. + +# Pull Request Guidelines + +Before you submit a pull request, check that it meets these guidelines: + +1. The pull request should include tests. + +2. If the pull request adds functionality, the docs should be updated. + Put your new functionality into a function with a docstring, and add the feature to the list in `README.md`. diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..d84f7805 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,21 @@ +# syntax=docker/dockerfile:1 + +FROM python:3.11-slim-buster + +ENV POETRY_VERSION=1.4 \ + POETRY_VIRTUALENVS_CREATE=false + +# Install poetry +RUN pip install "poetry==$POETRY_VERSION" + +# Copy only requirements to cache them in docker layer +WORKDIR /code +COPY poetry.lock pyproject.toml /code/ + +# Project initialization: +RUN poetry install --no-interaction --no-ansi --no-root --no-dev + +# Copy Python code to the Docker image +COPY smtcomp /code/smtcomp/ + +CMD [ "poetry", "run", "smtcomp"] diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..7e16b089 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023, SMTCOMP organizers + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..541de555 --- /dev/null +++ b/Makefile @@ -0,0 +1,54 @@ +.PHONY: install +install: ## Install the poetry environment and install the pre-commit hooks + @echo "🚀 Creating virtual environment using pyenv and poetry" + @poetry install + @poetry shell + +.PHONY: check +check: ## Run code quality tools. + @echo "🚀 Checking for obsolete submissions/Readme.md" + @poetry run python3 submissions/template/generate_Readme.py check + @echo "🚀 Checking Poetry lock file consistency with 'pyproject.toml': Running poetry lock --check" + @poetry check --lock + @echo "🚀 Linting code: Running pre-commit" + @poetry run pre-commit run -a + @echo "🚀 Static type checking: Running mypy" + @poetry run mypy + @echo "🚀 Checking for obsolete dependencies: Running deptry" + @poetry run deptry . 
+ +.PHONY: test +test: ## Test the code with pytest + @echo "🚀 Generating submissions/Readme.md" + @poetry run python3 submissions/template/generate_Readme.py generate + @echo "🚀 Testing code: Running pytest" + @poetry run pytest --cov --cov-config=pyproject.toml --cov-report=xml + +.PHONY: build +build: clean-build ## Build wheel file using poetry + @echo "🚀 Creating wheel file" + @poetry build + +.PHONY: clean-build +clean-build: ## clean build artifacts + @rm -rf dist + +.PHONY: help +help: + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' + +.DEFAULT_GOAL := help + +GENERATED_SCHEMA_FILE=submission-schema.json +GENERATED_SCHEMA_HTML=submission-schema.html + +.PHONY: submission-doc +submission-doc: + @echo "🚀 Generating schema to $(GENERATED_SCHEMA_FILE)" + @poetry run smtcomp dump-json-schema $(GENERATED_SCHEMA_FILE) + @echo "🚀 Generating html doc to $(GENERATED_SCHEMA_HTML)" + @echo " Needs 'pip install json-schema-for-humans'" + generate-schema-doc --expand-buttons --no-link-to-reused-ref $(GENERATED_SCHEMA_FILE) $(GENERATED_SCHEMA_HTML) + +hugo-server: + (cd web; hugo server) \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 00000000..08404c5b --- /dev/null +++ b/README.md @@ -0,0 +1,83 @@ +# smtcomp + +[![Release](https://img.shields.io/github/v/release/smtcomp/smtcomp.github.io)](https://img.shields.io/github/v/release/smtcomp/smtcomp.github.io) +[![Build status](https://img.shields.io/github/actions/workflow/status/smtcomp/smtcomp.github.io/main.yml?branch=main)](https://github.com/smtcomp/smtcomp.github.io/actions/workflows/main.yml?query=branch%3Amain) +[![codecov](https://codecov.io/gh/smtcomp/smtcomp.github.io/branch/main/graph/badge.svg)](https://codecov.io/gh/smtcomp/smtcomp.github.io) +[![Commit activity](https://img.shields.io/github/commit-activity/m/smtcomp/smtcomp.github.io)](https://img.shields.io/github/commit-activity/m/smtcomp/smtcomp.github.io) +[![License](https://img.shields.io/github/license/smtcomp/smtcomp.github.io)](https://img.shields.io/github/license/smtcomp/smtcomp.github.io) + +Tools used for the organization of the SMT competition + +- **Github repository**: +- **Documentation** + +## Getting started with development in this project + +```bash +git clone git@github.com:smtcomp/smtcomp.github.io.git +``` + +Finally, install the environment and the pre-commit hooks with + +```bash +make install +``` + +You are now ready to start development on your project! +The CI/CD pipeline will be triggered when you open a pull request, merge to main, or when you create a new release. + +To finalize the set-up for publishing to PyPi or Artifactory, see [here](https://fpgmaas.github.io/cookiecutter-poetry/features/publishing/#set-up-for-pypi). +For activating the automatic documentation with MkDocs, see [here](https://fpgmaas.github.io/cookiecutter-poetry/features/mkdocs/#enabling-the-documentation-on-github). +To enable the code coverage reports, see [here](https://fpgmaas.github.io/cookiecutter-poetry/features/codecov/). 
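+
+For routine development, the `Makefile` targets used above wrap the common commands. A minimal sketch of the loop (assuming `poetry` is on your `PATH` and the checkout is your working directory):
+
+```bash
+# One-time setup: create the virtual environment and install the pre-commit hooks
+make install
+
+# Before committing: formatting, lock-file consistency, mypy and deptry
+make check
+
+# Regenerate submissions/Readme.md and run the pytest suite with coverage
+make test
+```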
+
+## Using the smtcomp tool for generating benchexec
+
+#### Generate submissions [Optional]
+
+The final solvers submitted during SMT-COMP 2023 can be used:
+
+```
+smtcomp convert-csv tests/solvers_divisions_final.csv ../tmp/submissions
+```
+
+The generated files can be visualized using:
+
+```
+smtcomp show ../tmp/submissions/YicesQS.json
+```
+
+The solver archives can be downloaded using:
+
+```
+smtcomp download-archive ../tmp/submissions/*.json ../tmp/benchexec/cache
+```
+
+Trivial test benchmarks can be generated with:
+
+```
+smtcomp generate-benchmarks ../tmp/benchexec/includes/
+```
+
+The benchexec tasks can be generated using:
+
+```
+smtcomp generate-benchexec ../tmp/submissions/*.json ../tmp/includes/all.xml ../tmp/benchexec/cache
+```
+
+Benchexec can then be started using:
+
+```
+(cd ../tmp/benchexec/includes; benchexec all.xml --read-only-dir / --overlay-dir /home --full-access-dir .. --numOfThreads 8 -M 2GB -c 1)
+```
+
+---
+
+Repository initiated with [fpgmaas/cookiecutter-poetry](https://github.com/fpgmaas/cookiecutter-poetry).
diff --git a/archive/2012/benchmark_selection.html b/archive/2012/benchmark_selection.html
index 92fa3d3b..c503b3ea 100644
--- a/archive/2012/benchmark_selection.html
+++ b/archive/2012/benchmark_selection.html
- + - diff --git a/archive/2012/benchmark_selection.md b/archive/2012/benchmark_selection.md index 11067961..bc06993f 100644 --- a/archive/2012/benchmark_selection.md +++ b/archive/2012/benchmark_selection.md @@ -50,4 +50,3 @@ source file. A less algorithmic description can be found in the - [select_benchmarks](tools/select_benchmarks_2012.c): Given an input file of benchmarks, selects benchmarks according to the rules; statistics on stderr, benchmarks on stdout. Some changes may be made to this program before it is used officially for the competition benchmark selection; it is intended that these changes be limited to bug fixes, removal of obsolete debugging code, and documentation cleanups. - [Eligible Benchmarks](smtcomp2012-eligible.txt.bz2): These are the eligible 2012 benchmarks for inclusion. This file lists SMT-COMP 2012 benchmarks and their metadata. - [All Benchmarks](smtlib2012-benchs.txt.bz2): this is the total list of 2012 benchmarks, including ineligible benchmarks. - diff --git a/archive/2012/benchmarks.html b/archive/2012/benchmarks.html index 6e2b3c80..1a9e5906 100644 --- a/archive/2012/benchmarks.html +++ b/archive/2012/benchmarks.html @@ -26,7 +26,7 @@ - +
Unsat Core Track Benchmarks

The unsat core benchmarks are the subset of the benchmarks from the indicated logics that are unsatisfiable; the benchmarks themselves have been modified to include names for the assertions. Per the SMTLIB standard, the benchmarks are allowed to contain a mix of named and unnamed formulae, though ordinarily,
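As an illustration (a hypothetical formula, not one taken from the competition set), a named assertion in SMT-LIB 2 uses the `!` annotation syntax, e.g. `(assert (! (> x 0) :named goal1))`; the label `goal1` is what a solver can later report as part of an unsat core.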

- + - diff --git a/archive/2012/benchmarks.md b/archive/2012/benchmarks.md index 90339bea..e480aa0f 100644 --- a/archive/2012/benchmarks.md +++ b/archive/2012/benchmarks.md @@ -54,7 +54,7 @@ detailed description of the benchmarks can be found in the following paper: Efficient Symbolic Automated Analysis of Administrative Attribute-based RBAC-Policies, by F. Alberti, A. Armando, and S. Ranise. ### Unsat Core Track Benchmarks -The unsat core benchmarks are the subset of the benchmarks from the indicated +The unsat core benchmarks are the subset of the benchmarks from the indicated logics that are unsatisfiable; the benchmarks themselves have been modified to include names for the assertions. Per the SMTLIB standard, the benchmarks are allowed to contain a mix of named and unmaned formulae, though ordinarily, diff --git a/archive/2012/call12.txt b/archive/2012/call12.txt index 6131bc29..0d858518 100644 --- a/archive/2012/call12.txt +++ b/archive/2012/call12.txt @@ -35,10 +35,10 @@ will be evaluated and the results publicized, but no winners will be awarded. Main changes with respect to SMT-COMP 2011 ------------------------------------------ -- We are concentrating on just a few of the benchmark divisions this - year for the main competition. Some divisions, such as QF_UF, are - subsumed into more expressive logics; others have received only - light interest. Our goal is to focus the competition on divisions +- We are concentrating on just a few of the benchmark divisions this + year for the main competition. Some divisions, such as QF_UF, are + subsumed into more expressive logics; others have received only + light interest. Our goal is to focus the competition on divisions of particular interest to applications. All non-competition divisions will be run in exhibition mode, if solvers are submitted against them, and the results will be displayed and publicly reported. @@ -65,7 +65,7 @@ For more detailed information please refer to the rules posted at http://www.smtcomp.org/. In order to deliver high-quality benchmarks and to allow participants -to test their solvers we consider the deadline of April 15 strict. +to test their solvers we consider the deadline of April 15 strict. Benchmarks arriving thereafter will be included in the library but not used for SMT-COMP 2012. @@ -76,8 +76,8 @@ Benchmarks for Main and Parallel Track New benchmarks in all categories are welcome, particularly those representative of applications and those relevant to this year's competition. -The potential main competition benchmark divisions for this year include the -following divisions. For detailed descriptions of the divisions, refer to the +The potential main competition benchmark divisions for this year include the +following divisions. For detailed descriptions of the divisions, refer to the SMT-LIB web page at http://www.smtlib.org/ * QF_BV @@ -85,8 +85,8 @@ SMT-LIB web page at http://www.smtlib.org/ * QF_UFLIA, including benchmarks from QF_LIA * QF_UFLRA, including benchmarks from QF_LRA * QF_IDL -* AUFLIA+p -* AUFLIA-p +* AUFLIA+p +* AUFLIA-p * AUFNIRA where the benchmark categories are described below @@ -153,14 +153,14 @@ the solver multiple times by adding and retracting assertions. Each benchmark can be thought of as a "trace" dumped from the interaction of the solver with, e.g., a model-checker. The benchmarks will be executed using a "trace executor" which -simulates (and abstracts away) the online interaction with the +simulates (and abstracts away) the online interaction with the model checker. 
Please refer to the rules for more details. -*** We strongly encourage submissions of benchmarks for this track. *** +*** We strongly encourage submissions of benchmarks for this track. *** -We will execute the competition for any benchmark divisions for +We will execute the competition for any benchmark divisions for which we have adequate benchmarks and entrants. This is expected to include at least the divisions used in 2011: QF_BV, QF_LIA, QF_UFLIA, and QF_LRA. @@ -174,7 +174,7 @@ benchmarks. However, we highly encourage submission of benchmarks that elucidate particular problems or challenges in determining unsatisfiable cores or generating proofs. -Note that, in SMT-LIB format, an unsat-core benchmark must have +Note that, in SMT-LIB format, an unsat-core benchmark must have its asserted formulae named (see the standard or contact the organizers). The organizers are considering and accepting comment on which divisions @@ -196,15 +196,15 @@ Important Dates application track posted for comment. First version of the benchmark scrambler, benchmark selector and trace executor made available. - + * April 15 Final version of the benchmark scrambler, benchmark selector and trace exector made available. - -* April 15 No new benchmarks can be added after this date, but + +* April 15 No new benchmarks can be added after this date, but problems with existing benchmarks may be fixed. * June 1 Benchmark libraries are frozen. - + * June 15 (7pm EDT) Solvers due via StarExec (for all tracks), including system descriptions and magic numbers for benchmark scrambling. @@ -213,7 +213,7 @@ Important Dates marks the end of the weekend grace period for submissions). -* June 20 Opening value of NYSE Composite Index used to complete +* June 20 Opening value of NYSE Composite Index used to complete random seed. * June 26 - June 29 Dates of IJCAR main conference diff --git a/archive/2012/index.html b/archive/2012/index.html index e72a976c..d026b2bb 100644 --- a/archive/2012/index.html +++ b/archive/2012/index.html @@ -26,7 +26,7 @@ - +
- + - diff --git a/archive/2012/participants.html b/archive/2012/participants.html index a4cf461a..7655dd9a 100644 --- a/archive/2012/participants.html +++ b/archive/2012/participants.html @@ -26,7 +26,7 @@ - +
- + - diff --git a/archive/2012/results.html b/archive/2012/results.html index ea2348fc..210279f9 100644 --- a/archive/2012/results.html +++ b/archive/2012/results.html @@ -26,7 +26,7 @@ - +
- + - diff --git a/archive/2012/results/AUFLIA+p.html b/archive/2012/results/AUFLIA+p.html index 731400f3..145e5265 100644 --- a/archive/2012/results/AUFLIA+p.html +++ b/archive/2012/results/AUFLIA+p.html @@ -26,7 +26,7 @@ - +
- + - diff --git a/archive/2012/results/AUFLIA-p.html b/archive/2012/results/AUFLIA-p.html index cc8ee2b9..8989dde8 100644 --- a/archive/2012/results/AUFLIA-p.html +++ b/archive/2012/results/AUFLIA-p.html @@ -26,7 +26,7 @@ - +
- + - diff --git a/archive/2012/results/AUFNIRA.html b/archive/2012/results/AUFNIRA.html index 27d02308..cb1ad76b 100644 --- a/archive/2012/results/AUFNIRA.html +++ b/archive/2012/results/AUFNIRA.html @@ -26,7 +26,7 @@ - +
- + - diff --git a/archive/2012/results/QF_AUFBV.html b/archive/2012/results/QF_AUFBV.html index 0fc4c7ba..7a3defb5 100644 --- a/archive/2012/results/QF_AUFBV.html +++ b/archive/2012/results/QF_AUFBV.html @@ -26,7 +26,7 @@ - +
- + - diff --git a/archive/2012/results/QF_BV.html b/archive/2012/results/QF_BV.html index 9c07f6fb..bf27b3b4 100644 --- a/archive/2012/results/QF_BV.html +++ b/archive/2012/results/QF_BV.html @@ -26,7 +26,7 @@ - +
- + - diff --git a/archive/2012/results/QF_IDL.html b/archive/2012/results/QF_IDL.html index f1407e48..600e48ce 100644 --- a/archive/2012/results/QF_IDL.html +++ b/archive/2012/results/QF_IDL.html @@ -26,7 +26,7 @@ - +
QF_IDL (Main Track)

Benchmarks in this division: 174
Time Limit: 1200s

- + - diff --git a/archive/2012/results/QF_UFLIA.html b/archive/2012/results/QF_UFLIA.html index 3a91a7da..222b069c 100644 --- a/archive/2012/results/QF_UFLIA.html +++ b/archive/2012/results/QF_UFLIA.html @@ -26,7 +26,7 @@ - +
QF_UFLIA (Main Track)

Benchmarks in this division: 210
Time Limit: 1200s

- + - diff --git a/archive/2012/results/QF_UFLRA.html b/archive/2012/results/QF_UFLRA.html index 276bd8ba..697d8590 100644 --- a/archive/2012/results/QF_UFLRA.html +++ b/archive/2012/results/QF_UFLRA.html @@ -26,7 +26,7 @@ - +
- + - diff --git a/archive/2012/results/summary.html b/archive/2012/results/summary.html index 89ba521c..bf10b134 100644 --- a/archive/2012/results/summary.html +++ b/archive/2012/results/summary.html @@ -26,7 +26,7 @@ - +
- + - diff --git a/archive/2012/rules.html b/archive/2012/rules.html index 958b72c7..09688bc1 100644 --- a/archive/2012/rules.html +++ b/archive/2012/rules.html @@ -26,7 +26,7 @@ - +
Rules

Submission this year will be through the SMT-Exec service, an execution service for SMT solvers. You are encouraged to upload early and often to test your solver on the competition infrastructure. SMT-Exec allows

- + - diff --git a/archive/2012/rules.md b/archive/2012/rules.md index d68e2094..d91b69db 100644 --- a/archive/2012/rules.md +++ b/archive/2012/rules.md @@ -13,7 +13,7 @@ Submission this year is planned to be through the STAR-EXEC platform, which is still in development. --> -Submission this year will be through the +Submission this year will be through the SMT-Exec service, an execution service for SMT solvers. You are encouraged to upload early and often to test your solver on the competition infrastructure. SMT-Exec allows diff --git a/archive/2012/specs.html b/archive/2012/specs.html index bafa679d..6186b34d 100644 --- a/archive/2012/specs.html +++ b/archive/2012/specs.html @@ -26,7 +26,7 @@ - +
Machine Specifications

Note that the organizers had expected the Star-Exec service to be ready for the 2012 competition; the Star-Exec service would have provided considerably more resources. However, development delays have required us (as of mid-May 2012) to plan to use SMT-Exec again this year.

- + - diff --git a/archive/2012/specs.md b/archive/2012/specs.md index 61d89766..94caf13f 100644 --- a/archive/2012/specs.md +++ b/archive/2012/specs.md @@ -30,7 +30,7 @@ it is checked for compatibility with the competition infrastructure on simple benchmarks supported by your solver. -Note that the organizers had expected the Star-Exec service to be ready for +Note that the organizers had expected the Star-Exec service to be ready for the 2012 competition; the Star-Exec service would have provided considerably -more resources. However, development delays have required us (as of mid-May +more resources. However, development delays have required us (as of mid-May 2012) to plan to use SMT-Exec again this year. diff --git a/archive/2012/tools.html b/archive/2012/tools.html index bc9b1b6a..3a0eb5c4 100644 --- a/archive/2012/tools.html +++ b/archive/2012/tools.html @@ -26,7 +26,7 @@ - +
- + - diff --git a/archive/2012/tools/TreeLimitedRun.c b/archive/2012/tools/TreeLimitedRun.c index 4689cbaf..b47ad1e8 100644 --- a/archive/2012/tools/TreeLimitedRun.c +++ b/archive/2012/tools/TreeLimitedRun.c @@ -476,13 +476,13 @@ int DelayBetweenChecks,struct timeval WCStartTime,int PrintEachCheck) { PrintTimes("WATCH",LastTreeTime,WallClockSoFar(WCStartTime),GetProcessMem(ChildPID)); } - } while ((CPUTimeLimit == 0 || LastTreeTime <= CPUTimeLimit) && + } while ((CPUTimeLimit == 0 || LastTreeTime <= CPUTimeLimit) && NumberInTree > 0 && !GlobalInterrupted); //----If over time limit, stop them all (XCPU to top guy first) if (NumberInTree > 0 && LastTreeTime > CPUTimeLimit) { KilledInTree = KillTree(getuid(),ChildPID,SIGXCPU); - + printedTimeout = 1; fprintf(stdout,"Timeout\n"); fflush(stdout); diff --git a/archive/2012/tools/generate_benchmark_selection_list.py b/archive/2012/tools/generate_benchmark_selection_list.py index a1c945ee..ff3c103d 100644 --- a/archive/2012/tools/generate_benchmark_selection_list.py +++ b/archive/2012/tools/generate_benchmark_selection_list.py @@ -39,7 +39,7 @@ def get_difficulty(times_in_seconds): A = 30 - minutes = [min(t/60.0, 30.0) for t in times_in_seconds] + minutes = [min(t / 60.0, 30.0) for t in times_in_seconds] minutes.sort() if len(minutes) >= 5: minutes = minutes[1:-1] @@ -48,19 +48,22 @@ def get_difficulty(times_in_seconds): difficulty = (5.0 * math.log(1.0 + A**2)) / (math.log(1.0 + 30**2)) return round(difficulty, 3) + def get_solution(sols, benchsol): - if benchsol in ('sat', 'unsat'): + if benchsol in ("sat", "unsat"): return benchsol else: if len(sols) > 1 and len(set(sols)) == 1: s = sols[0] - if s in ('sat', 'unsat'): + if s in ("sat", "unsat"): return s - return 'unknown' + return "unknown" + def warn(msg): sys.stderr.write(msg) + def main(): db = MySQLdb.connect(db=SMTEXEC_DB, user=SMTEXEC_USER, passwd=SMTEXEC_PWD) c = db.cursor() @@ -75,15 +78,14 @@ def main(): cursol = row[2].strip() if curkey != prevkey: if prevkey: - difficulty = '%.3f' % get_difficulty(times) + difficulty = "%.3f" % get_difficulty(times) solution = get_solution(sols, prevkey[-1]) - if solution in ('sat', 'unsat'): + if solution in ("sat", "unsat"): division = prevkey[0] - family, filename = prevkey[1].split('/', 1) + family, filename = prevkey[1].split("/", 1) benchid = prevkey[2] category = prevkey[3] - entries.append([division, family, category, difficulty, - solution, benchid, filename]) + entries.append([division, family, category, difficulty, solution, benchid, filename]) else: excluded += 1 pass @@ -92,7 +94,7 @@ def main(): times = [] sols = [] t = float(row[7]) - if cursol not in ('sat', 'unsat'): + if cursol not in ("sat", "unsat"): t = 1800.0 else: sols.append(cursol) @@ -100,16 +102,15 @@ def main(): if prevkey: solution = get_solution(sols, prevkey[-1]) - if solution in ('sat', 'unsat'): - difficulty = '%.3f' % get_difficulty(times) + if solution in ("sat", "unsat"): + difficulty = "%.3f" % get_difficulty(times) division = prevkey[0] - family, filename = prevkey[1].split('/', 1) + family, filename = prevkey[1].split("/", 1) benchid = prevkey[2] category = prevkey[3] - entries.append([division, family, category, difficulty, - solution, benchid, filename]) + entries.append([division, family, category, difficulty, solution, benchid, filename]) else: - #warn('EXCLUDING %s\n' % prevkey) + # warn('EXCLUDING %s\n' % prevkey) excluded += 1 c.close() @@ -117,13 +118,13 @@ def main(): entries.sort() pr = sys.stdout.write - pr('%d\n' % len(entries)) + pr("%d\n" % 
len(entries)) for r in entries: - pr('%s\n' % ' '.join(map(str, r))) - pr('END x industrial 0 sat 0 x\n') + pr("%s\n" % " ".join(map(str, r))) + pr("END x industrial 0 sat 0 x\n") + + sys.stderr.write("created competition pool with %d / %d benchmarks\n" % (len(entries), len(entries) + excluded)) - sys.stderr.write('created competition pool with %d / %d benchmarks\n' % - (len(entries), len(entries) + excluded)) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/archive/2012/tools/get_selected_benchmarks_paths.py b/archive/2012/tools/get_selected_benchmarks_paths.py index 0be61e12..fab41247 100644 --- a/archive/2012/tools/get_selected_benchmarks_paths.py +++ b/archive/2012/tools/get_selected_benchmarks_paths.py @@ -7,9 +7,9 @@ import os, sys + def usage(): - sys.stdout.write("Usage: %s benchmarks-file selected-ids-file\n" % - os.path.basename(sys.argv[0])) + sys.stdout.write("Usage: %s benchmarks-file selected-ids-file\n" % os.path.basename(sys.argv[0])) sys.exit(1) @@ -22,15 +22,15 @@ def main(): pr = sys.stdout.write with open(sys.argv[1]) as f: - f.readline() # skip the first line with the count + f.readline() # skip the first line with the count for line in f: bits = line.split() bench_id = int(bits[5]) if bench_id in selected: - pth = '%s/%s/%s' % (bits[0], bits[1], bits[-1]) + pth = "%s/%s/%s" % (bits[0], bits[1], bits[-1]) pr(pth) - pr('\n') + pr("\n") -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/archive/2012/tools/select_benchmarks_2012.c b/archive/2012/tools/select_benchmarks_2012.c index 6df2c530..47f35773 100644 --- a/archive/2012/tools/select_benchmarks_2012.c +++ b/archive/2012/tools/select_benchmarks_2012.c @@ -271,7 +271,7 @@ int main(int argc, char *argv[]) { if( !(division = buf) || !(family = strchr(division, ' ')) || - !(s_category = strchr(family + 1, ' ')) || + !(s_category = strchr(family + 1, ' ')) || !(s_difficulty = strchr(s_category + 1, ' ')) || !(s_solution = strchr(s_difficulty + 1, ' ')) || !(s_benchmarkid = strchr(s_solution + 1, ' ')) || diff --git a/archive/2014/benchmarks.html b/archive/2014/benchmarks.html index 8bad76d0..878c8c34 100644 --- a/archive/2014/benchmarks.html +++ b/archive/2014/benchmarks.html @@ -26,7 +26,7 @@ - +
Benchmarks

(set-info :source |<information about where the benchmark came from including author contact, paper citations, etc.>|)
(set-info :smt-lib-version 2.0)
(set-info :category <either "industrial", "crafted", or "random">)
(set-info :status <either sat, unsat, or unknown>)

In :category, the quotes are needed in the benchmark file.
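For example, `(set-info :category "industrial")` is the well-formed version; writing the category word without the surrounding quotes is not accepted.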

Benchmark Set And Difficulty Measurements

The benchmarks used in the competition are selected from the SMT-LIB benchmark set. The complete set is reduced by a few restrictions:

• The correct result (sat or unsat) must be known
• Trivial benchmarks, defined in the rules as those that are quickly solved by all solvers in the previous competition or evaluation, are excluded

    For the last point a measurement of difficulty is needed. This is an approximate measure meant simply to make the competition more challenging. For 2014 we used the value of the fastest solver on that benchmark in the 2013 evaluation. For benchmarks introduced since then, an approximate value was determined by taking the best value from running one or more solvers from the evaluation during the preparation for the 2014 competition.

The table from which benchmarks will be chosen for the competition is available here. The table will be updated during the course of preparing for the competition as benchmarks are corrected, removed because they are ill-formed or inappropriate, moved among logics as needed, or their difficulty or expected results are determined.

    The table is in space-separated form that is very amenable to text processing tools such as awk, grep and sort. You can turn it into a comma-separated table for Excel or similar tools by using sed or tr to turn spaces into commas.
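For instance (the table's file name is hypothetical here), either tool does the conversion in one line:

```bash
tr ' ' ',' < benchmark-table.txt > benchmark-table.csv
# or, equivalently, with sed:
sed 's/ /,/g' benchmark-table.txt > benchmark-table.csv
```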

The columns in the table are these:

1. id - the StarExec benchmark id. Note that this id will change if the benchmark is corrected. This is happening frequently during the preparation for the competition. The ids will be resynchronized with StarExec just prior to the final selection.
2. difficulty - this value is in seconds, or is one of the words “trivial” or “unknown”. “trivial” benchmarks are those that have been determined to be easy for every solver and so excluded by the rules. “unknown” means the difficulty value has not yet been determined. Values of 1500 or 1600 generally indicate that all solvers quit without a result (timeout or memout).
3. starexec-expected-result - this is the metadata value from StarExec and is the correct value expected for the benchmark. This should be one of “sat”, “unsat” or “unknown” (without the quotes). However, there are also values of “null” (metadata value is not set).
4. status - this is the :status value from the benchmark, as recorded in the StarExec metadata. It should be precisely the same as the starexec-expected-result.
5. category - this is the :category value as recorded in StarExec metadata, which should have determined it from a set-info command in the benchmark. This value should be one of “industrial”, “crafted”, “random”, or “check” (without the quotes). Values of “null” (category not set), “unknown” (category explicitly set to the value “unknown”), and “none” (value in StarExec is an empty string) are also present.

      Benchmark Set And Difficulty

[trailing blank line removed at end of file]

diff --git a/archive/2014/benchmarks.md b/archive/2014/benchmarks.md
index c610d739..06140118 100644
[whitespace-only changes; benchmarks.md is the markdown source of the benchmarks.html page above, and the touched hunks are the same passages: the (set-info ...) header block, the "Benchmark Set And Difficulty Measurements" paragraphs, and the numbered column list (id, difficulty, starexec-expected-result, status, category). Each hunk only strips trailing spaces or re-encodes the quotes in the :category line.]

diff --git a/archive/2014/call-for-comments.txt b/archive/2014/call-for-comments.txt
index 00d8c4ba..27d32bc4 100644
[trailing-whitespace fixes only, in the paragraphs announcing that SMT-COMP is the annual competition among Satisfiability Modulo Theories (SMT) solvers held as part of the 'Olympic Games' at the 'Summer of Logic' in Vienna, that SMTCOMP'14 is organized under the direction of the SMT Steering committee, and in the CALL FOR COMMENTS, CALL FOR BENCHMARKS, and CALL FOR SOLVERS sections]

diff --git a/archive/2014/index.html b/archive/2014/index.html
index 2f296435..6a65a665 100644
--- a/archive/2014/index.html
+++ b/archive/2014/index.html
@@ -26,7 +26,7 @@ / @@ -35,7 +35,7 @@ / @@ -56,7 +56,7 @@
[whitespace-only fixes in the shared page chrome]

@@ -72,7 +72,7 @@

SMT-COMP 2014

(July 9-24, 2014, Vienna, AU). In particular, SMT-COMP is part of the FLoC'14 Olympic Games. The SMT workshop will include a block of time to present the competitors and results of the SMTCOMP competition. There will also be an awards ceremony on July 21, as part of the Olympic Games.

@@ -124,7 +124,6 @@

Acknowledgments

[trailing blank line removed at end of file]

diff --git a/archive/2014/participants.html b/archive/2014/participants.html
index 4ab023f4..61c91e65 100644
--- a/archive/2014/participants.html
+++ b/archive/2014/participants.html
[whitespace-only fixes in the shared page chrome and in several rows of the participants tables: the SMTInterpol row (UFIDL, UFLIA, UFLRA, UFNIA columns), the Abziz and Abziz2 rows, the CVC3 row of X marks, and the SLEEK, SLIDE, and SPEN rows of the SL-COMP 2014 table; a trailing blank line is removed at end of file]

diff --git a/archive/2014/participants.md b/archive/2014/participants.md
index b24566ca..7d7f9463 100644
[the same whitespace-only row fixes in the markdown source of the participants page]

diff --git a/archive/2014/results.html b/archive/2014/results.html
index 45ce5a6d..1e6af96f 100644
--- a/archive/2014/results.html
+++ b/archive/2014/results.html
[whitespace-only fixes in the shared page chrome and after the SL-COMP 2014 Results section; a trailing blank line is removed at end of file]

[The per-division 2014 results pages and the specs page receive the same treatment: trailing spaces stripped from the shared header and navigation markup and one trailing blank line dropped at end of file. The only content visible in these diffs is that chrome plus truncated page headings ("Division COMPLETE: The winner is", "Parallel Performance" on the -app pages, "An Alternate Scoring Metric" on gold.html, "Main Track (Summary)" and "Application Track (Summary)" on the summary pages, "Machine Specifications" on specs.html). Affected files under archive/2014/: results/ALIA.html, results/AUFLIA.html, results/AUFLIRA.html, results/AUFNIRA-app.html, results/AUFNIRA.html, results/BV.html, results/LIA.html, results/LRA.html, results/NIA.html, results/NRA.html, results/QF_ABV.html, results/QF_ALIA.html, results/QF_AUFBV.html, results/QF_AUFLIA-app.html, results/QF_AUFLIA.html, results/QF_AX.html, results/QF_BV-app.html, results/QF_BV.html, results/QF_IDL.html, results/QF_LIA-app.html, results/QF_LIA.html, results/QF_LRA-app.html, results/QF_LRA.html, results/QF_NIA.html, results/QF_NRA.html, results/QF_RDL.html, results/QF_UF.html, results/QF_UFBV.html, results/QF_UFIDL.html, results/QF_UFLIA-app.html, results/QF_UFLIA.html, results/QF_UFLRA-app.html, results/QF_UFLRA.html, results/QF_UFNIA.html, results/QF_UFNRA.html, results/SLCOMP.html, results/UF.html, results/UFBV.html, results/UFIDL.html, results/UFLIA.html, results/UFLRA-app.html, results/UFLRA.html, results/UFNIA.html, results/gold.html, results/summary-app.html, results/summary.html, and specs.html.]
diff --git a/archive/2014/system-descriptions/Asterix.txt b/archive/2014/system-descriptions/Asterix.txt
index 4022604d..f15d46ff 100644
--- a/archive/2014/system-descriptions/Asterix.txt
+++ b/archive/2014/system-descriptions/Asterix.txt
@@ -9,4 +9,4 @@ with Z3 and implements support for the acyclic list segment predicate only.
 Details about the algorithm and its correctness are described in

 J. A. Navarro Perez and A. Rybalchenko. Separation Logic Modulo Theories.
-In Proc. APLAS13, 2013.
\ No newline at end of file
+In Proc. APLAS13, 2013.

diff --git a/archive/2014/system-descriptions/OpenSMT.txt b/archive/2014/system-descriptions/OpenSMT.txt
index 8b4820ed..16644d2c 100644
--- a/archive/2014/system-descriptions/OpenSMT.txt
+++ b/archive/2014/system-descriptions/OpenSMT.txt
@@ -22,4 +22,3 @@ correctness. While the version competing in the SMT-COMP 2014 still
 suffers from certain inefficiencies, such as a naive implementation of
 theory propagation, it is already in many respects comparable to the
 state-of-the-art in its field.
-

diff --git a/archive/2014/system-descriptions/kleaver-description.txt b/archive/2014/system-descriptions/kleaver-description.txt
index 59286932..6dfbcec8 100644
--- a/archive/2014/system-descriptions/kleaver-description.txt
+++ b/archive/2014/system-descriptions/kleaver-description.txt
@@ -6,9 +6,9 @@ solver. These optimizations are described in more detail in
 [EXE-CCS-06], [KLEE-OSDI-08] and [MultiSolver-CAV-13]. The competition
 version has caching disabled (since it is only beneficial when
 sequences of queries are issued), and uses STP (the default solver of
-KLEE) as the underlying SMT solver. The portfolio version of Kleaver 
-employs Boolector, STP and Z3 through the metaSMT solver 
-framework [MetaSMT-DIFTS-11]. 
+KLEE) as the underlying SMT solver. The portfolio version of Kleaver
+employs Boolector, STP and Z3 through the metaSMT solver
+framework [MetaSMT-DIFTS-11].

 Kleaver is available as open-source as part of KLEE, and can be
 downloaded from http://klee.github.io/klee/. It was originally built as
@@ -31,10 +31,7 @@ and Implementation(OSDI) 2008: 209-224.
 Cadar. Multi-solver Support in Symbolic Execution. International
 Conference on Computer Aided Verification (CAV) 2013: 53-68.

-[MetaSMT-DIFTS-11] Finn Haedicke, Stefan Frehse, Goerschwin Fey, Daniel 
-Grosse, Rolf Drechsler. metaSMT: Focus on your application not on solver 
-integration. International Workshop on Design and Implementation of Formal 
+[MetaSMT-DIFTS-11] Finn Haedicke, Stefan Frehse, Goerschwin Fey, Daniel
+Grosse, Rolf Drechsler. metaSMT: Focus on your application not on solver
+integration. International Workshop on Design and Implementation of Formal
 Tools and Systems (DIFTS) 2011.
-
-
-

diff --git a/archive/2014/tools.html b/archive/2014/tools.html
index a98ca417..bdae2bc7 100644
--- a/archive/2014/tools.html
+++ b/archive/2014/tools.html
@@ -26,7 +26,7 @@ / @@ -35,7 +35,7 @@ / @@ -56,7 +56,7 @@
[whitespace-only fixes in the shared page chrome]

@@ -71,8 +71,8 @@

Tools

Pre-Processor (Benchmark Scrambler)

GitHub Repository
Sources

SMT-COMP 2014 Releases
  • Main Track
    • Binary, available on StarExec as SMT-COMP 2014 Scrambler (id: 120)
  • Application Track
    • Binary, available on StarExec as SMT-COMP 2014 Application Scrambler (id: 134)

Post-Processor

GitHub Repository

SMT-COMP 2014 Releases
  • Application Track
    • Binary, available on StarExec as SMT-COMP 2014 Application Track (id: 140)

Trace executor

GitHub Repository
Sources

All solvers wrapped with the Trace executor are available here.

[the hunks here only normalize the blank separator rows between the release entries; a trailing blank line is removed at end of file]

diff --git a/archive/2015/benchmarks.html b/archive/2015/benchmarks.html
index d08ae443..3f673f02 100644
--- a/archive/2015/benchmarks.html
+++ b/archive/2015/benchmarks.html
@@ -26,7 +26,7 @@ / @@ -35,7 +35,7 @@ / @@ -56,7 +56,7 @@ / @@ -488,7 +488,6 @@
[whitespace-only fixes in the shared 2015 page chrome (the navigation bar now also carries a Slides link) and after the Application Track section; a trailing blank line is removed at end of file]

diff --git a/archive/2015/call-for-comments.txt b/archive/2015/call-for-comments.txt
index 5192dd20..23a3c98d 100644
--- a/archive/2015/call-for-comments.txt
+++ b/archive/2015/call-for-comments.txt
@@ -30,7 +30,7 @@ committee. The organizing team for SMT-COMP'15 is

 This is a call for three things:

-CALL FOR COMMENTS: 
+CALL FOR COMMENTS:

 The organizing team is preparing the schedule and rules for 2015.
 Any comments you may have to improve the competition over past years

diff --git a/archive/2015/index.html b/archive/2015/index.html
index 2b4709b6..2b08f719 100644
[whitespace-only fixes in the shared page chrome and after the Organizers section; a trailing blank line is removed at end of file]

diff --git a/archive/2015/participants.html b/archive/2015/participants.html
index 05760d50..bd6af220 100644
--- a/archive/2015/participants.html
+++ b/archive/2015/participants.html
[whitespace-only fixes in the shared page chrome and after the Divisions table; a trailing blank line is removed at end of file]

diff --git a/archive/2015/results.html b/archive/2015/results.html
index ace35e52..afb4358b 100644
[whitespace-only fixes in the shared page chrome and after the SMT-COMP 2015 Results section; a trailing blank line is removed at end of file]

diff --git a/archive/2015/results/ALIA-app.html b/archive/2015/results/ALIA-app.html
index 8bce6696..6434c04d 100644
--- a/archive/2015/results/ALIA-app.html
+++ b/archive/2015/results/ALIA-app.html
[The 2015 per-division results pages receive the same whitespace-only fixes to the shared page chrome. The content fragments visible in each diff record the division, track, and benchmark counts, all "as of Fri Oct 30 12:49:29 GMT":
  ALIA (Application Track): 24 benchmarks, 24 industrial; non-competitive division
  ALIA (Main Track): 42 benchmarks, 42 industrial
  ANIA (Application Track): 3 benchmarks, 3 industrial; non-competitive division
  AUFLIA (Main Track): 4 benchmarks, 1 industrial
  AUFLIRA (Main Track): 19849 benchmarks, 19649 industrial
  AUFNIRA (Main Track): 1050 benchmarks, 1050 industrial; non-competitive division
  BV (Main Track): 85 benchmarks, 85 industrial; non-competitive division
  LIA (Application Track): 6 benchmarks, 6 industrial; non-competitive division
  LIA (Main Track): 201 benchmarks, 201 industrial
  LRA (Main Track): 339 benchmarks, 261 industrial
  NIA (Main Track): 9 benchmarks, 9 industrial; non-competitive division
  NRA (Main Track): 3788 benchmarks, 3788 industrial; non-competitive division
  QF_ABV (Main Track): 14720 benchmarks, 14366 industrial
  QF_ALIA (Application Track): 44 benchmarks, 44 industrial; the winner is Yices
  QF_ALIA (Main Track): 134 benchmarks, 103 industrial
  QF_ANIA (Application Track): 5 benchmarks, 5 industrial; non-competitive division
  QF_ANIA (Main Track): 6 benchmarks, 6 industrial; non-competitive division
  QF_AUFBV (Main Track): 37 benchmarks, 37 industrial
  QF_AUFLIA (Application Track): 72 benchmarks, 72 industrial; the winner is Yices
  QF_AUFLIA (Main Track): 1009 benchmarks, 19 industrial
  QF_AUFNIA (Main Track): 21 benchmarks, 21 industrial; non-competitive division
  QF_AX (Main Track): 551 benchmarks, 0 industrial
  QF_BV (Application Track): 18 benchmarks, 18 industrial; the winner is Yices
  QF_BV (Main Track): 26414 benchmarks, 24914 industrial
  QF_BVFP (Main Track): 7 benchmarks, 0 industrial; non-competitive division
  QF_FP (Main Track): 34413 benchmarks, 0 industrial; non-competitive division
  QF_IDL (Main Track): 2094 benchmarks, 1074 industrial
  QF_LIA (Application Track): 65 benchmarks, 65 industrial; the winner is Yices
  QF_LIA (Main Track): 5839 benchmarks, 4102 industrial
  QF_LIRA (Main Track): 6 benchmarks, 6 industrial]

The winners for this division are:

@@ -370,7 +370,6 @@

Other Information

- + - diff --git a/archive/2015/results/QF_LRA-app.html b/archive/2015/results/QF_LRA-app.html index fe4bd68c..843848d4 100644 --- a/archive/2015/results/QF_LRA-app.html +++ b/archive/2015/results/QF_LRA-app.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_LRA (Application Track)

Competition results for the QF_LRA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 10 +

Competition benchmarks = 10
Competition industrial benchmarks = 10

The winner is : Yices

@@ -224,7 +224,6 @@

Parallel Performance (industrial)

- + - diff --git a/archive/2015/results/QF_LRA.html b/archive/2015/results/QF_LRA.html index 776b3f61..af2ff3dd 100644 --- a/archive/2015/results/QF_LRA.html +++ b/archive/2015/results/QF_LRA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_LRA (Main Track)

Competition results for the QF_LRA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 1626 +

Competition benchmarks = 1626
Competition industrial benchmarks = 1624

The winners for this division are:

@@ -432,7 +432,6 @@

Other Information

- + - diff --git a/archive/2015/results/QF_NIA-app.html b/archive/2015/results/QF_NIA-app.html index dca5370c..0b0ac4fe 100644 --- a/archive/2015/results/QF_NIA-app.html +++ b/archive/2015/results/QF_NIA-app.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_NIA (Application Track)

Competition results for the QF_NIA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 10 +

Competition benchmarks = 10
Competition industrial benchmarks = 10

Non-Competitive division

@@ -194,7 +194,6 @@

Parallel Performance (industrial)

- + - diff --git a/archive/2015/results/QF_NIA.html b/archive/2015/results/QF_NIA.html index 4a335725..d0f130a3 100644 --- a/archive/2015/results/QF_NIA.html +++ b/archive/2015/results/QF_NIA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_NIA (Main Track)

Competition results for the QF_NIA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 8475 +

Competition benchmarks = 8475
Competition industrial benchmarks = 8308

The winners for this division are:

@@ -432,7 +432,6 @@

Other Information

- + - diff --git a/archive/2015/results/QF_NIRA.html b/archive/2015/results/QF_NIRA.html index fe589b7b..bbf0fa21 100644 --- a/archive/2015/results/QF_NIRA.html +++ b/archive/2015/results/QF_NIRA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_NIRA (Main Track)

Competition results for the QF_NIRA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 2 +

Competition benchmarks = 2
Competition industrial benchmarks = 2

The winners for this division are:

@@ -308,7 +308,6 @@

Other Information

- + - diff --git a/archive/2015/results/QF_NRA.html b/archive/2015/results/QF_NRA.html index 335ce73a..60395652 100644 --- a/archive/2015/results/QF_NRA.html +++ b/archive/2015/results/QF_NRA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_NRA (Main Track)

Competition results for the QF_NRA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 10184 +

Competition benchmarks = 10184
Competition industrial benchmarks = 10104

The winners for this division are:

@@ -401,7 +401,6 @@

Other Information

- + - diff --git a/archive/2015/results/QF_RDL.html b/archive/2015/results/QF_RDL.html index 3c8f3908..af23fa41 100644 --- a/archive/2015/results/QF_RDL.html +++ b/archive/2015/results/QF_RDL.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_RDL (Main Track)

Competition results for the QF_RDL division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 220 +

Competition benchmarks = 220
Competition industrial benchmarks = 145

The winners for this division are:

@@ -370,7 +370,6 @@

Other Information

- + - diff --git a/archive/2015/results/QF_UF.html b/archive/2015/results/QF_UF.html index 37dd3593..a8caadc5 100644 --- a/archive/2015/results/QF_UF.html +++ b/archive/2015/results/QF_UF.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_UF (Main Track)

Competition results for the QF_UF division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 6649 +

Competition benchmarks = 6649
Competition industrial benchmarks = 3

The winners for this division are:

@@ -494,7 +494,6 @@

Other Information

- + - diff --git a/archive/2015/results/QF_UFBV.html b/archive/2015/results/QF_UFBV.html index b086d377..eb783f3f 100644 --- a/archive/2015/results/QF_UFBV.html +++ b/archive/2015/results/QF_UFBV.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_UFBV (Main Track)

Competition results for the QF_UFBV division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 31 +

Competition benchmarks = 31
Competition industrial benchmarks = 31

The winners for this division are:

@@ -370,7 +370,6 @@

Other Information

- + - diff --git a/archive/2015/results/QF_UFIDL.html b/archive/2015/results/QF_UFIDL.html index abdff05e..382eb6f6 100644 --- a/archive/2015/results/QF_UFIDL.html +++ b/archive/2015/results/QF_UFIDL.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_UFIDL (Main Track)

Competition results for the QF_UFIDL division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 441 +

Competition benchmarks = 441
Competition industrial benchmarks = 422

The winners for this division are:

@@ -370,7 +370,6 @@

Other Information

- + - diff --git a/archive/2015/results/QF_UFLIA-app.html b/archive/2015/results/QF_UFLIA-app.html index 6b8cd687..53008d2f 100644 --- a/archive/2015/results/QF_UFLIA-app.html +++ b/archive/2015/results/QF_UFLIA-app.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_UFLIA (Application Track)

Competition results for the QF_UFLIA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 905 +

Competition benchmarks = 905
Competition industrial benchmarks = 905

The winner is : CVC4 (exp)

@@ -225,7 +225,6 @@

Parallel Performance (industrial)

- + - diff --git a/archive/2015/results/QF_UFLIA.html b/archive/2015/results/QF_UFLIA.html index 7be15f22..8bf51b45 100644 --- a/archive/2015/results/QF_UFLIA.html +++ b/archive/2015/results/QF_UFLIA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_UFLIA (Main Track)

Competition results for the QF_UFLIA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 598 +

Competition benchmarks = 598
Competition industrial benchmarks = 367

The winners for this division are:

@@ -401,7 +401,6 @@

Other Information

- + - diff --git a/archive/2015/results/QF_UFLRA-app.html b/archive/2015/results/QF_UFLRA-app.html index 4c543c73..0428fe50 100644 --- a/archive/2015/results/QF_UFLRA-app.html +++ b/archive/2015/results/QF_UFLRA-app.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_UFLRA (Application Track)

Competition results for the QF_UFLRA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 3331 +

Competition benchmarks = 3331
Competition industrial benchmarks = 3331

The winner is : Yices

@@ -224,7 +224,6 @@

Parallel Performance (industrial)

- + - diff --git a/archive/2015/results/QF_UFLRA.html b/archive/2015/results/QF_UFLRA.html index f0ae7aef..7dd1315e 100644 --- a/archive/2015/results/QF_UFLRA.html +++ b/archive/2015/results/QF_UFLRA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_UFLRA (Main Track)

Competition results for the QF_UFLRA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 1627 +

Competition benchmarks = 1627
Competition industrial benchmarks = 727

The winners for this division are:

@@ -401,7 +401,6 @@

Other Information

- + - diff --git a/archive/2015/results/QF_UFNIA-app.html b/archive/2015/results/QF_UFNIA-app.html index d9222d27..6746760f 100644 --- a/archive/2015/results/QF_UFNIA-app.html +++ b/archive/2015/results/QF_UFNIA-app.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_UFNIA (Application Track)

Competition results for the QF_UFNIA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 1 +

Competition benchmarks = 1
Competition industrial benchmarks = 1

Non-Competitive division

@@ -194,7 +194,6 @@

Parallel Performance (industrial)

- + - diff --git a/archive/2015/results/QF_UFNIA.html b/archive/2015/results/QF_UFNIA.html index 4fe0d89e..5d0450d5 100644 --- a/archive/2015/results/QF_UFNIA.html +++ b/archive/2015/results/QF_UFNIA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_UFNIA (Main Track)

Competition results for the QF_UFNIA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 7 +

Competition benchmarks = 7
Competition industrial benchmarks = 7

Non-Competitive division

@@ -291,7 +291,6 @@

Other Information

- + - diff --git a/archive/2015/results/QF_UFNRA.html b/archive/2015/results/QF_UFNRA.html index 2cbdea76..809cf6e7 100644 --- a/archive/2015/results/QF_UFNRA.html +++ b/archive/2015/results/QF_UFNRA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

QF_UFNRA (Main Track)

Competition results for the QF_UFNRA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 34 +

Competition benchmarks = 34
Competition industrial benchmarks = 34

Non-Competitive division

@@ -292,7 +292,6 @@

Other Information

- + - diff --git a/archive/2015/results/UF.html b/archive/2015/results/UF.html index 431a04a5..fd131e08 100644 --- a/archive/2015/results/UF.html +++ b/archive/2015/results/UF.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

UF (Main Track)

Competition results for the UF division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 2839 +

Competition benchmarks = 2839
Competition industrial benchmarks = 2380

The winners for this division are:

@@ -341,7 +341,6 @@

Other Information

- + - diff --git a/archive/2015/results/UFBV.html b/archive/2015/results/UFBV.html index ea865fce..c590474d 100644 --- a/archive/2015/results/UFBV.html +++ b/archive/2015/results/UFBV.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

UFBV (Main Track)

Competition results for the UFBV division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 71 +

Competition benchmarks = 71
Competition industrial benchmarks = 71

Non-Competitive division

@@ -291,7 +291,6 @@

Other Information

- + - diff --git a/archive/2015/results/UFIDL.html b/archive/2015/results/UFIDL.html index 0ba99bf7..9bb006b4 100644 --- a/archive/2015/results/UFIDL.html +++ b/archive/2015/results/UFIDL.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

UFIDL (Main Track)

Competition results for the UFIDL division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 68 +

Competition benchmarks = 68
Competition industrial benchmarks = 68

The winners for this division are:

@@ -339,7 +339,6 @@

Other Information

- + - diff --git a/archive/2015/results/UFLIA.html b/archive/2015/results/UFLIA.html index a8697fb6..776df17e 100644 --- a/archive/2015/results/UFLIA.html +++ b/archive/2015/results/UFLIA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

UFLIA (Main Track)

Competition results for the UFLIA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 8404 +

Competition benchmarks = 8404
Competition industrial benchmarks = 8017

The winners for this division are:

@@ -339,7 +339,6 @@

Other Information

- + - diff --git a/archive/2015/results/UFLRA-app.html b/archive/2015/results/UFLRA-app.html index 3084659c..9fa4e33c 100644 --- a/archive/2015/results/UFLRA-app.html +++ b/archive/2015/results/UFLRA-app.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

UFLRA (Application Track)

Competition results for the UFLRA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 5358 +

Competition benchmarks = 5358
Competition industrial benchmarks = 5358

Non-Competitive division

@@ -195,7 +195,6 @@

Parallel Performance (industrial)

- + - diff --git a/archive/2015/results/UFLRA.html b/archive/2015/results/UFLRA.html index 215a2eeb..d6cd22b2 100644 --- a/archive/2015/results/UFLRA.html +++ b/archive/2015/results/UFLRA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

UFLRA (Main Track)

Competition results for the UFLRA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 25 +

Competition benchmarks = 25
Competition industrial benchmarks = 20

The winners for this division are:

@@ -339,7 +339,6 @@

Other Information

- + - diff --git a/archive/2015/results/UFNIA.html b/archive/2015/results/UFNIA.html index f0d0f5fb..8f6aa75d 100644 --- a/archive/2015/results/UFNIA.html +++ b/archive/2015/results/UFNIA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2015 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -98,7 +98,7 @@

UFNIA (Main Track)

Competition results for the UFNIA division as of Fri Oct 30 12:49:29 GMT

-

Competition benchmarks = 2319 +

Competition benchmarks = 2319
Competition industrial benchmarks = 2319

Non-Competitive division

@@ -292,7 +292,6 @@

Other Information

The 2015 summary and specification pages receive the identical whitespace
cleanup; the section headings visible in their context lines are:

archive/2015/results/app-summary.html       Application Track (Summary)
archive/2015/results/competition-app.html   Competition-Wide Scoring (Application Track)
archive/2015/results/competition-main.html  Competition-Wide Scoring (Main Track)
archive/2015/results/summary.html           Main Track (Summary)
archive/2015/specs.html                     Machine Specifications

archive/2015/specs.md additionally drops its trailing blank line
(@@ -46,4 +46,3 @@) after the closing advice that available memory is capped
at 61440 MB and that participants should exercise their solvers on a few
benchmarks before the competition.
archive/2015/tools.html receives the same whitespace fixes throughout its
download listings. The page content, unchanged, covers:

  - Pre-Processor (Benchmark Scrambler): GitHub repository, sources and
    binaries under the SMT-COMP 2015 releases; available on StarExec as
    "SMT-COMP 2015 Scrambler" (id: 226) for the Main Track and
    "SMT-COMP 2015 Application Scrambler" (id: 225) for the Application Track.
  - Post-Processor: GitHub repository, sources and binaries; available on
    StarExec as "SMT-COMP 2015" (id: 189) for the Main Track and
    "SMT-COMP 2015 Application Track" (id: 198) for the Application Track.
  - Trace executor: GitHub repository, sources and binary; all solvers
    wrapped with the Trace executor are available for download.
The 2016 top-level pages follow, with the same whitespace fixes and one
trailing line dropped per file; the section headings in context are:

archive/2016/benchmarks.html    Unsat-Core Track
archive/2016/index.html         Organizers
archive/2016/participants.html  Divisions
archive/2016/results.html       SMT-COMP 2016 Results

archive/2016/call-for-comments.txt only loses trailing whitespace after the
"CALL FOR COMMENTS:" heading (@@ -31,7 +31,7 @@).
Every 2016 result page is cleaned the same way: whitespace fixes in the header
and navigation markup, plus one trailing line dropped at the end of the file.
The last heading in view is "Parallel Performance" on division and application
pages and "Sequential Performance" on unsat-core and unknown-benchmark pages.
The pages touched under archive/2016/results/ (all .html) are:

ALIA-app, ALIA-ucore, ALIA
ANIA-app
AUFLIA-ucore, AUFLIA
AUFLIRA-ucore, AUFLIRA-unknown, AUFLIRA
AUFNIRA-ucore, AUFNIRA-unknown, AUFNIRA
BV-ucore, BV-unknown, BV
LIA-app, LIA-ucore, LIA-unknown, LIA
LRA-ucore, LRA-unknown, LRA
NIA-ucore, NIA-unknown, NIA
NRA-ucore, NRA-unknown, NRA
QF_ABV-ucore, QF_ABV-unknown, QF_ABV
QF_ALIA-app, QF_ALIA-ucore, QF_ALIA
QF_ANIA-app, QF_ANIA-ucore, QF_ANIA
QF_AUFBV-ucore, QF_AUFBV
QF_AUFLIA-app, QF_AUFLIA-ucore, QF_AUFLIA
QF_AUFNIA-ucore, QF_AUFNIA
QF_AX-ucore, QF_AX
QF_BV-app, QF_BV-ucore, QF_BV-unknown, QF_BV
QF_BVFP-ucore, QF_BVFP
QF_FP-ucore, QF_FP-unknown, QF_FP
QF_IDL-ucore, QF_IDL-unknown, QF_IDL
QF_LIA-app, QF_LIA-ucore, QF_LIA-unknown, QF_LIA
QF_LIRA-ucore, QF_LIRA-unknown, QF_LIRA
QF_LRA-app, QF_LRA-ucore, QF_LRA-unknown

diff --git a/archive/2016/results/QF_LRA.html b/archive/2016/results/QF_LRA.html
index 8eac5e12..17c50f45 100644
--- a/archive/2016/results/QF_LRA.html
+++ b/archive/2016/results/QF_LRA.html
@@ -26,7 +26,7 @@
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -285,7 +285,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/QF_NIA-app.html b/archive/2016/results/QF_NIA-app.html index 6c9e262d..35305fdd 100644 --- a/archive/2016/results/QF_NIA-app.html +++ b/archive/2016/results/QF_NIA-app.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -146,7 +146,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/QF_NIA-ucore.html b/archive/2016/results/QF_NIA-ucore.html index 9885b599..810118f7 100644 --- a/archive/2016/results/QF_NIA-ucore.html +++ b/archive/2016/results/QF_NIA-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -140,7 +140,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_NIA-unknown.html b/archive/2016/results/QF_NIA-unknown.html index e3254c05..41dca79c 100644 --- a/archive/2016/results/QF_NIA-unknown.html +++ b/archive/2016/results/QF_NIA-unknown.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -178,7 +178,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_NIA.html b/archive/2016/results/QF_NIA.html index 780fd047..e01675f8 100644 --- a/archive/2016/results/QF_NIA.html +++ b/archive/2016/results/QF_NIA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -269,7 +269,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/QF_NIRA-ucore.html b/archive/2016/results/QF_NIRA-ucore.html index 62c68287..0e97ccd6 100644 --- a/archive/2016/results/QF_NIRA-ucore.html +++ b/archive/2016/results/QF_NIRA-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -140,7 +140,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_NIRA-unknown.html b/archive/2016/results/QF_NIRA-unknown.html index 80b215de..d3d96a5e 100644 --- a/archive/2016/results/QF_NIRA-unknown.html +++ b/archive/2016/results/QF_NIRA-unknown.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -160,7 +160,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_NIRA.html b/archive/2016/results/QF_NIRA.html index ab6eb196..290b04ed 100644 --- a/archive/2016/results/QF_NIRA.html +++ b/archive/2016/results/QF_NIRA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -227,7 +227,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/QF_NRA-ucore.html b/archive/2016/results/QF_NRA-ucore.html index f98de116..b2039791 100644 --- a/archive/2016/results/QF_NRA-ucore.html +++ b/archive/2016/results/QF_NRA-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -140,7 +140,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_NRA-unknown.html b/archive/2016/results/QF_NRA-unknown.html index 653aa66e..f9412b4d 100644 --- a/archive/2016/results/QF_NRA-unknown.html +++ b/archive/2016/results/QF_NRA-unknown.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -166,7 +166,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_NRA.html b/archive/2016/results/QF_NRA.html index dc41aca4..214cd0ee 100644 --- a/archive/2016/results/QF_NRA.html +++ b/archive/2016/results/QF_NRA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -239,7 +239,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/QF_RDL-ucore.html b/archive/2016/results/QF_RDL-ucore.html index aeb74920..d0984abc 100644 --- a/archive/2016/results/QF_RDL-ucore.html +++ b/archive/2016/results/QF_RDL-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -163,7 +163,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_RDL-unknown.html b/archive/2016/results/QF_RDL-unknown.html index a02faf34..5e032191 100644 --- a/archive/2016/results/QF_RDL-unknown.html +++ b/archive/2016/results/QF_RDL-unknown.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -172,7 +172,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_RDL.html b/archive/2016/results/QF_RDL.html index 6d796d83..bcf65438 100644 --- a/archive/2016/results/QF_RDL.html +++ b/archive/2016/results/QF_RDL.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -253,7 +253,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/QF_UF-ucore.html b/archive/2016/results/QF_UF-ucore.html index 47934506..c3efe69c 100644 --- a/archive/2016/results/QF_UF-ucore.html +++ b/archive/2016/results/QF_UF-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -169,7 +169,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_UF-unknown.html b/archive/2016/results/QF_UF-unknown.html index 8ff3e69e..cdd8693d 100644 --- a/archive/2016/results/QF_UF-unknown.html +++ b/archive/2016/results/QF_UF-unknown.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -179,7 +179,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_UF.html b/archive/2016/results/QF_UF.html index b857d25b..ebbc249a 100644 --- a/archive/2016/results/QF_UF.html +++ b/archive/2016/results/QF_UF.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -271,7 +271,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/QF_UFBV-ucore.html b/archive/2016/results/QF_UFBV-ucore.html index 361f2705..a8854d99 100644 --- a/archive/2016/results/QF_UFBV-ucore.html +++ b/archive/2016/results/QF_UFBV-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -148,7 +148,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_UFBV.html b/archive/2016/results/QF_UFBV.html index 08c7cc45..75a2ba23 100644 --- a/archive/2016/results/QF_UFBV.html +++ b/archive/2016/results/QF_UFBV.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -229,7 +229,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/QF_UFIDL-ucore.html b/archive/2016/results/QF_UFIDL-ucore.html index 3be2f777..cb87e6d2 100644 --- a/archive/2016/results/QF_UFIDL-ucore.html +++ b/archive/2016/results/QF_UFIDL-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -155,7 +155,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_UFIDL.html b/archive/2016/results/QF_UFIDL.html index 6f79aeda..50bb4551 100644 --- a/archive/2016/results/QF_UFIDL.html +++ b/archive/2016/results/QF_UFIDL.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -227,7 +227,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/QF_UFLIA-app.html b/archive/2016/results/QF_UFLIA-app.html index dad587f0..7cda3625 100644 --- a/archive/2016/results/QF_UFLIA-app.html +++ b/archive/2016/results/QF_UFLIA-app.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -168,7 +168,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/QF_UFLIA-ucore.html b/archive/2016/results/QF_UFLIA-ucore.html index 86eddc24..e93caedf 100644 --- a/archive/2016/results/QF_UFLIA-ucore.html +++ b/archive/2016/results/QF_UFLIA-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -163,7 +163,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_UFLIA.html b/archive/2016/results/QF_UFLIA.html index 6096c0b6..cc5cfa9d 100644 --- a/archive/2016/results/QF_UFLIA.html +++ b/archive/2016/results/QF_UFLIA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -243,7 +243,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/QF_UFLRA-app.html b/archive/2016/results/QF_UFLRA-app.html index 5e095d08..1b94db25 100644 --- a/archive/2016/results/QF_UFLRA-app.html +++ b/archive/2016/results/QF_UFLRA-app.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -170,7 +170,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/QF_UFLRA-ucore.html b/archive/2016/results/QF_UFLRA-ucore.html index 82502c35..9368206f 100644 --- a/archive/2016/results/QF_UFLRA-ucore.html +++ b/archive/2016/results/QF_UFLRA-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -171,7 +171,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_UFLRA-unknown.html b/archive/2016/results/QF_UFLRA-unknown.html index 1230f41f..02d2469c 100644 --- a/archive/2016/results/QF_UFLRA-unknown.html +++ b/archive/2016/results/QF_UFLRA-unknown.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -173,7 +173,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_UFLRA.html b/archive/2016/results/QF_UFLRA.html index 4176e206..07f80391 100644 --- a/archive/2016/results/QF_UFLRA.html +++ b/archive/2016/results/QF_UFLRA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -257,7 +257,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/QF_UFNIA-app.html b/archive/2016/results/QF_UFNIA-app.html index c66ee588..91eea41a 100644 --- a/archive/2016/results/QF_UFNIA-app.html +++ b/archive/2016/results/QF_UFNIA-app.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -148,7 +148,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/QF_UFNIA-ucore.html b/archive/2016/results/QF_UFNIA-ucore.html index 6bad8812..386dca18 100644 --- a/archive/2016/results/QF_UFNIA-ucore.html +++ b/archive/2016/results/QF_UFNIA-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -140,7 +140,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_UFNIA.html b/archive/2016/results/QF_UFNIA.html index 821cdd5a..a927fe8d 100644 --- a/archive/2016/results/QF_UFNIA.html +++ b/archive/2016/results/QF_UFNIA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -199,7 +199,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/QF_UFNRA-ucore.html b/archive/2016/results/QF_UFNRA-ucore.html index 08783389..4531530c 100644 --- a/archive/2016/results/QF_UFNRA-ucore.html +++ b/archive/2016/results/QF_UFNRA-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -140,7 +140,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_UFNRA-unknown.html b/archive/2016/results/QF_UFNRA-unknown.html index 172545ac..35e56f9c 100644 --- a/archive/2016/results/QF_UFNRA-unknown.html +++ b/archive/2016/results/QF_UFNRA-unknown.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -148,7 +148,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/QF_UFNRA.html b/archive/2016/results/QF_UFNRA.html index 35be8963..fb3658e7 100644 --- a/archive/2016/results/QF_UFNRA.html +++ b/archive/2016/results/QF_UFNRA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -199,7 +199,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/UF-ucore.html b/archive/2016/results/UF-ucore.html index 42da367e..af9f33b1 100644 --- a/archive/2016/results/UF-ucore.html +++ b/archive/2016/results/UF-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -146,7 +146,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/UF-unknown.html b/archive/2016/results/UF-unknown.html index 5b3fe124..7d5405cc 100644 --- a/archive/2016/results/UF-unknown.html +++ b/archive/2016/results/UF-unknown.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -158,7 +158,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/UF.html b/archive/2016/results/UF.html index 9d983ff4..9c4c07c1 100644 --- a/archive/2016/results/UF.html +++ b/archive/2016/results/UF.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -227,7 +227,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/UFBV-ucore.html b/archive/2016/results/UFBV-ucore.html index c9e24635..ae947d35 100644 --- a/archive/2016/results/UFBV-ucore.html +++ b/archive/2016/results/UFBV-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -140,7 +140,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/UFBV-unknown.html b/archive/2016/results/UFBV-unknown.html index 86d23fcc..33bc42ab 100644 --- a/archive/2016/results/UFBV-unknown.html +++ b/archive/2016/results/UFBV-unknown.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -148,7 +148,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/UFBV.html b/archive/2016/results/UFBV.html index 800d232c..a73aec9d 100644 --- a/archive/2016/results/UFBV.html +++ b/archive/2016/results/UFBV.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -199,7 +199,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/UFIDL-ucore.html b/archive/2016/results/UFIDL-ucore.html index 24d4d140..e4a1995b 100644 --- a/archive/2016/results/UFIDL-ucore.html +++ b/archive/2016/results/UFIDL-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -148,7 +148,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/UFIDL-unknown.html b/archive/2016/results/UFIDL-unknown.html index b1692d32..a2eb76a9 100644 --- a/archive/2016/results/UFIDL-unknown.html +++ b/archive/2016/results/UFIDL-unknown.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -160,7 +160,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/UFIDL.html b/archive/2016/results/UFIDL.html index 091e4ac8..ffeab5f9 100644 --- a/archive/2016/results/UFIDL.html +++ b/archive/2016/results/UFIDL.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -227,7 +227,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/UFLIA-ucore.html b/archive/2016/results/UFLIA-ucore.html index aafb9b02..8a571996 100644 --- a/archive/2016/results/UFLIA-ucore.html +++ b/archive/2016/results/UFLIA-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -148,7 +148,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/UFLIA-unknown.html b/archive/2016/results/UFLIA-unknown.html index f2e3216d..17f7918b 100644 --- a/archive/2016/results/UFLIA-unknown.html +++ b/archive/2016/results/UFLIA-unknown.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -160,7 +160,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/UFLIA.html b/archive/2016/results/UFLIA.html index 5fda9d56..2c980d44 100644 --- a/archive/2016/results/UFLIA.html +++ b/archive/2016/results/UFLIA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -225,7 +225,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/UFLRA-app.html b/archive/2016/results/UFLRA-app.html index bd2559d6..0c3ac2ba 100644 --- a/archive/2016/results/UFLRA-app.html +++ b/archive/2016/results/UFLRA-app.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -147,7 +147,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/UFLRA-ucore.html b/archive/2016/results/UFLRA-ucore.html index afa494da..ee562c61 100644 --- a/archive/2016/results/UFLRA-ucore.html +++ b/archive/2016/results/UFLRA-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -146,7 +146,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/UFLRA.html b/archive/2016/results/UFLRA.html index d9ed4470..7f444923 100644 --- a/archive/2016/results/UFLRA.html +++ b/archive/2016/results/UFLRA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -227,7 +227,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/UFNIA-ucore.html b/archive/2016/results/UFNIA-ucore.html index 64df2151..fd99c90e 100644 --- a/archive/2016/results/UFNIA-ucore.html +++ b/archive/2016/results/UFNIA-ucore.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -140,7 +140,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/UFNIA-unknown.html b/archive/2016/results/UFNIA-unknown.html index 584f398c..8cfcd33b 100644 --- a/archive/2016/results/UFNIA-unknown.html +++ b/archive/2016/results/UFNIA-unknown.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -152,7 +152,6 @@

Sequential Performance

- + - diff --git a/archive/2016/results/UFNIA.html b/archive/2016/results/UFNIA.html index fdc8ca8c..bd4ea136 100644 --- a/archive/2016/results/UFNIA.html +++ b/archive/2016/results/UFNIA.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -213,7 +213,6 @@

Parallel Performance

- + - diff --git a/archive/2016/results/app-summary.html b/archive/2016/results/app-summary.html index 88a37aba..fd918b55 100644 --- a/archive/2016/results/app-summary.html +++ b/archive/2016/results/app-summary.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -216,7 +216,6 @@

Application Track (Summary)

- + - diff --git a/archive/2016/results/competition-main.html b/archive/2016/results/competition-main.html index 5dc304f3..d34bd0c1 100644 --- a/archive/2016/results/competition-main.html +++ b/archive/2016/results/competition-main.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -405,7 +405,6 @@

Competition-Wide Scoring fo

- + - diff --git a/archive/2016/results/summary.html b/archive/2016/results/summary.html index 8f264d03..7fae0d8d 100644 --- a/archive/2016/results/summary.html +++ b/archive/2016/results/summary.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -553,7 +553,6 @@

Main Track (Summary)

- + - diff --git a/archive/2016/results/ucore-summary.html b/archive/2016/results/ucore-summary.html index ca02b7b6..7de79ed1 100644 --- a/archive/2016/results/ucore-summary.html +++ b/archive/2016/results/ucore-summary.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -396,7 +396,6 @@

Unsat Core Track (Summary)

- + - diff --git a/archive/2016/results/unknown-summary.html b/archive/2016/results/unknown-summary.html index cf6dcf7c..20318c6e 100644 --- a/archive/2016/results/unknown-summary.html +++ b/archive/2016/results/unknown-summary.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -281,7 +281,6 @@

Unknown Benchmarks Track (Summary)

- + - diff --git a/archive/2016/specs.html b/archive/2016/specs.html index bf72a474..4b3fd898 100644 --- a/archive/2016/specs.html +++ b/archive/2016/specs.html @@ -26,7 +26,7 @@ - +
@@ -35,7 +35,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -56,7 +56,7 @@

SMT-COMP 2016 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -121,7 +121,6 @@

Machine Specifications

- + - diff --git a/archive/2016/tools.html b/archive/2016/tools.html index 4649f583..a82fdd5d 100644 --- a/archive/2016/tools.html +++ b/archive/2016/tools.html @@ -26,7 +26,7 @@ - +

archive/2016/tools.html gets the same header and navigation hunks, plus matching link fixes throughout its tool listing. The page content is left textually unchanged and reads:

Tools

Pre-Processor (Benchmark Scrambler): GitHub Repository, Sources, SMT-COMP 2016 Releases
  • Main Track: Binary, available on StarExec as SMT-COMP 2016 Scrambler (id: 349)
  • Application Track: Binary, available on StarExec as SMT-COMP 2016 Application Scrambler (id: 285)
  • Unsat Core Track: patch generate_unsat_core_benchmark.patch was applied on top of the sources; available on StarExec as SMT-COMP 2016 Unsat-Core Scrambler (id: 350)

Post-Processor: GitHub Repository, Sources, SMT-COMP 2016 Releases
  • Main Track: Binary, available on StarExec as SMT-COMP 2016 (id: 273)
  • Application Track: Binary, available on StarExec as SMT-COMP 2016 Application Track (id: 274)
  • Unsat Core Track: Binary, available on StarExec as SMT-COMP 2016 Unsat-Core Track (id: 297)

Trace executor: GitHub Repository, Sources, Binary. All solvers wrapped with the Trace executor are available here.

The archive/2017 pages follow with the same header, tagline, and navigation hunks (SMT-COMP 2017 Rules, Benchmarks, Tools, Specs, Participants, Results, Slides, Report): benchmarks.html (with a trailing line removed after the Unsat-Core Track section), index.html (Organizers), participants.html (Divisions), results.html (SMT-COMP 2017 Results), and the news pages 2017-03-15.html, 2017-04-21.html (SMT-COMP 2017 draft rules posted), 2017-06-20.html (SMT-COMP 2017 competition jobs started), and 2017-07-23.html (SMT-COMP 2017 results presented). archive/2017/call-for-comments.txt gets a whitespace-only fix to the "CALL FOR COMMENTS:" heading, and archive/2017/participants.md drops one trailing line after "3. Unsat-core track."

The division result pages under archive/2017/results/ receive the same three hunks each, again with one trailing line removed after the results tables. Main Track results are reported as of Fri Jul 21 10:18:02 GMT and Unsat Core Track results as of Tue Jul 18 22:06:21 GMT. Pages updated, with the benchmark counts and time limits visible in the touched hunks: ABVFP; ALIA with -app and -ucore; ANIA-app and ANIA-ucore; AUFBVDTLIA (1709 benchmarks, 1200s time limit); AUFDTLIA; AUFLIA with -ucore; AUFLIRA (20011 benchmarks, 1200s) with -ucore; AUFNIRA (1480 benchmarks, 1200s) with -ucore (1050 benchmarks, 2400s); BV (5150 benchmarks, 1200s) with -ucore; LIA with -app and -ucore; LRA with -ucore (1106 benchmarks, 2400s); NIA with -ucore; NRA with -ucore (3801 benchmarks, 2400s); QF_ABV (15061 benchmarks, 1200s) with -ucore; QF_ALIA with -app and -ucore (80 benchmarks, 2400s); QF_ANIA with -app and -ucore; QF_AUFBV with -ucore; QF_AUFLIA (1009 benchmarks, 1200s) with -app and -ucore; QF_AUFNIA with -ucore; QF_AX with -ucore; QF_BV (40043 benchmarks, 1200s) with -app and -ucore; and QF_BVFP-app.

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -140,7 +140,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_BVFP-ucore.html b/archive/2017/results/QF_BVFP-ucore.html index 45945bfa..3e4b19e5 100644 --- a/archive/2017/results/QF_BVFP-ucore.html +++ b/archive/2017/results/QF_BVFP-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -158,7 +158,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_BVFP.html b/archive/2017/results/QF_BVFP.html index e2d25dad..c9b9d1fb 100644 --- a/archive/2017/results/QF_BVFP.html +++ b/archive/2017/results/QF_BVFP.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -176,7 +176,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_DT.html b/archive/2017/results/QF_DT.html index f256f4ab..e263a3c0 100644 --- a/archive/2017/results/QF_DT.html +++ b/archive/2017/results/QF_DT.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -97,7 +97,7 @@

QF_DT (Main Track)

Competition results for the QF_DT division as of Fri Jul 21 10:18:02 GMT

-

Benchmarks in this division : 8000 +

Benchmarks in this division : 8000
Time Limit: 1200s

@@ -158,7 +158,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_FP-app.html b/archive/2017/results/QF_FP-app.html index 6e7ce964..f6df7c75 100644 --- a/archive/2017/results/QF_FP-app.html +++ b/archive/2017/results/QF_FP-app.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -140,7 +140,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_FP-ucore.html b/archive/2017/results/QF_FP-ucore.html index 1fb3ef9f..4845d93c 100644 --- a/archive/2017/results/QF_FP-ucore.html +++ b/archive/2017/results/QF_FP-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -97,7 +97,7 @@

QF_FP (Unsat Core Track)

Competition results for the QF_FP division as of Tue Jul 18 22:06:21 GMT

-

Benchmarks in this division : 20028 +

Benchmarks in this division : 20028
Time Limit: 2400s

@@ -157,7 +157,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_FP.html b/archive/2017/results/QF_FP.html index aa564205..264c26a0 100644 --- a/archive/2017/results/QF_FP.html +++ b/archive/2017/results/QF_FP.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -97,7 +97,7 @@

QF_FP (Main Track)

Competition results for the QF_FP division as of Fri Jul 21 10:18:02 GMT

-

Benchmarks in this division : 40302 +

Benchmarks in this division : 40302
Time Limit: 1200s

@@ -203,7 +203,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_IDL-ucore.html b/archive/2017/results/QF_IDL-ucore.html index e16848c2..3a1fd24c 100644 --- a/archive/2017/results/QF_IDL-ucore.html +++ b/archive/2017/results/QF_IDL-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -196,7 +196,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_IDL.html b/archive/2017/results/QF_IDL.html index 793a08b4..965ef82a 100644 --- a/archive/2017/results/QF_IDL.html +++ b/archive/2017/results/QF_IDL.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -234,7 +234,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_LIA-app.html b/archive/2017/results/QF_LIA-app.html index 23dfc768..3754aa7e 100644 --- a/archive/2017/results/QF_LIA-app.html +++ b/archive/2017/results/QF_LIA-app.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -169,7 +169,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_LIA-ucore.html b/archive/2017/results/QF_LIA-ucore.html index f10af8c4..f499484e 100644 --- a/archive/2017/results/QF_LIA-ucore.html +++ b/archive/2017/results/QF_LIA-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -211,7 +211,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_LIA.html b/archive/2017/results/QF_LIA.html index 1bb8f913..22e2c3dd 100644 --- a/archive/2017/results/QF_LIA.html +++ b/archive/2017/results/QF_LIA.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -266,7 +266,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_LIRA-ucore.html b/archive/2017/results/QF_LIRA-ucore.html index 3f95d1c6..3ae38d3b 100644 --- a/archive/2017/results/QF_LIRA-ucore.html +++ b/archive/2017/results/QF_LIRA-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -196,7 +196,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_LIRA.html b/archive/2017/results/QF_LIRA.html index 32d93835..caccfd8d 100644 --- a/archive/2017/results/QF_LIRA.html +++ b/archive/2017/results/QF_LIRA.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -234,7 +234,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_LRA-app.html b/archive/2017/results/QF_LRA-app.html index 38bcb0cf..d6e9510d 100644 --- a/archive/2017/results/QF_LRA-app.html +++ b/archive/2017/results/QF_LRA-app.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -97,7 +97,7 @@

QF_LRA (Application Track)

Competition results for the QF_LRA division as of Tue Jul 18 22:06:21 GMT

-

Benchmarks in this division : 10 +

Benchmarks in this division : 10
Time Limit: 2400s

@@ -176,7 +176,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_LRA-ucore.html b/archive/2017/results/QF_LRA-ucore.html index 4331c1dc..6374f414 100644 --- a/archive/2017/results/QF_LRA-ucore.html +++ b/archive/2017/results/QF_LRA-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -211,7 +211,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_LRA.html b/archive/2017/results/QF_LRA.html index 211b5fe2..b17ce6ad 100644 --- a/archive/2017/results/QF_LRA.html +++ b/archive/2017/results/QF_LRA.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -281,7 +281,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_NIA-app.html b/archive/2017/results/QF_NIA-app.html index 6be96cab..71c7eeea 100644 --- a/archive/2017/results/QF_NIA-app.html +++ b/archive/2017/results/QF_NIA-app.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -97,7 +97,7 @@

QF_NIA (Application Track)

Competition results for the QF_NIA division as of Tue Jul 18 22:06:21 GMT

-

Benchmarks in this division : 10 +

Benchmarks in this division : 10
Time Limit: 2400s

@@ -147,7 +147,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_NIA-ucore.html b/archive/2017/results/QF_NIA-ucore.html index ad8ad206..235e2209 100644 --- a/archive/2017/results/QF_NIA-ucore.html +++ b/archive/2017/results/QF_NIA-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -97,7 +97,7 @@

QF_NIA (Unsat Core Track)

Competition results for the QF_NIA division as of Tue Jul 18 22:06:21 GMT

-

Benchmarks in this division : 3130 +

Benchmarks in this division : 3130
Time Limit: 2400s

@@ -171,7 +171,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_NIA.html b/archive/2017/results/QF_NIA.html index 1be32392..57536106 100644 --- a/archive/2017/results/QF_NIA.html +++ b/archive/2017/results/QF_NIA.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -235,7 +235,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_NIRA-ucore.html b/archive/2017/results/QF_NIRA-ucore.html index 239ee185..cd1539a2 100644 --- a/archive/2017/results/QF_NIRA-ucore.html +++ b/archive/2017/results/QF_NIRA-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -170,7 +170,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_NIRA.html b/archive/2017/results/QF_NIRA.html index ab0805b2..894515dd 100644 --- a/archive/2017/results/QF_NIRA.html +++ b/archive/2017/results/QF_NIRA.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -219,7 +219,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_NRA-ucore.html b/archive/2017/results/QF_NRA-ucore.html index c346a3b9..3df32eed 100644 --- a/archive/2017/results/QF_NRA-ucore.html +++ b/archive/2017/results/QF_NRA-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -170,7 +170,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_NRA.html b/archive/2017/results/QF_NRA.html index 46db4f6c..5c0447ed 100644 --- a/archive/2017/results/QF_NRA.html +++ b/archive/2017/results/QF_NRA.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -235,7 +235,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_RDL-ucore.html b/archive/2017/results/QF_RDL-ucore.html index 9cb3d1a1..df076ed0 100644 --- a/archive/2017/results/QF_RDL-ucore.html +++ b/archive/2017/results/QF_RDL-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -197,7 +197,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_RDL.html b/archive/2017/results/QF_RDL.html index 5bf3c3a6..4cf45c72 100644 --- a/archive/2017/results/QF_RDL.html +++ b/archive/2017/results/QF_RDL.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -235,7 +235,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UF-ucore.html b/archive/2017/results/QF_UF-ucore.html index 980e990c..78a23783 100644 --- a/archive/2017/results/QF_UF-ucore.html +++ b/archive/2017/results/QF_UF-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -97,7 +97,7 @@

QF_UF (Unsat Core Track)

Competition results for the QF_UF division as of Tue Jul 18 22:06:21 GMT

-

Benchmarks in this division : 4101 +

Benchmarks in this division : 4101
Time Limit: 2400s

@@ -211,7 +211,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UF.html b/archive/2017/results/QF_UF.html index 2ee8811f..85989395 100644 --- a/archive/2017/results/QF_UF.html +++ b/archive/2017/results/QF_UF.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -97,7 +97,7 @@

QF_UF (Main Track)

Competition results for the QF_UF division as of Fri Jul 21 10:18:02 GMT

-

Benchmarks in this division : 6650 +

Benchmarks in this division : 6650
Time Limit: 1200s

@@ -267,7 +267,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UFBV-ucore.html b/archive/2017/results/QF_UFBV-ucore.html index 437f4bdf..223bdd61 100644 --- a/archive/2017/results/QF_UFBV-ucore.html +++ b/archive/2017/results/QF_UFBV-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -187,7 +187,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UFBV.html b/archive/2017/results/QF_UFBV.html index 38fd07ef..fd991b5b 100644 --- a/archive/2017/results/QF_UFBV.html +++ b/archive/2017/results/QF_UFBV.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -236,7 +236,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UFIDL-ucore.html b/archive/2017/results/QF_UFIDL-ucore.html index b059e816..932c315d 100644 --- a/archive/2017/results/QF_UFIDL-ucore.html +++ b/archive/2017/results/QF_UFIDL-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -196,7 +196,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UFIDL.html b/archive/2017/results/QF_UFIDL.html index a9f8617f..233ba964 100644 --- a/archive/2017/results/QF_UFIDL.html +++ b/archive/2017/results/QF_UFIDL.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -235,7 +235,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UFLIA-app.html b/archive/2017/results/QF_UFLIA-app.html index e7f77829..5822ce21 100644 --- a/archive/2017/results/QF_UFLIA-app.html +++ b/archive/2017/results/QF_UFLIA-app.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -97,7 +97,7 @@

QF_UFLIA (Application Track)

Competition results for the QF_UFLIA division as of Tue Jul 18 22:06:21 GMT

-

Benchmarks in this division : 780 +

Benchmarks in this division : 780
Time Limit: 2400s

@@ -169,7 +169,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UFLIA-ucore.html b/archive/2017/results/QF_UFLIA-ucore.html index 895ee742..615f5f9a 100644 --- a/archive/2017/results/QF_UFLIA-ucore.html +++ b/archive/2017/results/QF_UFLIA-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -211,7 +211,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UFLIA.html b/archive/2017/results/QF_UFLIA.html index a74e1793..a54a17f8 100644 --- a/archive/2017/results/QF_UFLIA.html +++ b/archive/2017/results/QF_UFLIA.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -252,7 +252,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UFLRA-app.html b/archive/2017/results/QF_UFLRA-app.html index 8e4cc4ee..faf721bc 100644 --- a/archive/2017/results/QF_UFLRA-app.html +++ b/archive/2017/results/QF_UFLRA-app.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -97,7 +97,7 @@

QF_UFLRA (Application Track)

Competition results for the QF_UFLRA division as of Tue Jul 18 22:06:21 GMT

-

Benchmarks in this division : 3056 +

Benchmarks in this division : 3056
Time Limit: 2400s

@@ -169,7 +169,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UFLRA-ucore.html b/archive/2017/results/QF_UFLRA-ucore.html index 6e69d1ed..49192df0 100644 --- a/archive/2017/results/QF_UFLRA-ucore.html +++ b/archive/2017/results/QF_UFLRA-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -211,7 +211,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UFLRA.html b/archive/2017/results/QF_UFLRA.html index e2c02f37..5672b8ce 100644 --- a/archive/2017/results/QF_UFLRA.html +++ b/archive/2017/results/QF_UFLRA.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -251,7 +251,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UFNIA-app.html b/archive/2017/results/QF_UFNIA-app.html index 55bf5998..331c893c 100644 --- a/archive/2017/results/QF_UFNIA-app.html +++ b/archive/2017/results/QF_UFNIA-app.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -147,7 +147,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UFNIA-ucore.html b/archive/2017/results/QF_UFNIA-ucore.html index 4c635523..b82d0adc 100644 --- a/archive/2017/results/QF_UFNIA-ucore.html +++ b/archive/2017/results/QF_UFNIA-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -170,7 +170,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UFNIA.html b/archive/2017/results/QF_UFNIA.html index 0506a848..e45f2d05 100644 --- a/archive/2017/results/QF_UFNIA.html +++ b/archive/2017/results/QF_UFNIA.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -205,7 +205,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UFNRA-ucore.html b/archive/2017/results/QF_UFNRA-ucore.html index 4b407cd1..bd7dc5d3 100644 --- a/archive/2017/results/QF_UFNRA-ucore.html +++ b/archive/2017/results/QF_UFNRA-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -170,7 +170,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/QF_UFNRA.html b/archive/2017/results/QF_UFNRA.html index fe34c765..bfd3e372 100644 --- a/archive/2017/results/QF_UFNRA.html +++ b/archive/2017/results/QF_UFNRA.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -219,7 +219,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/UF-ucore.html b/archive/2017/results/UF-ucore.html index c5ce6eae..b4100dbb 100644 --- a/archive/2017/results/UF-ucore.html +++ b/archive/2017/results/UF-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -171,7 +171,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/UF.html b/archive/2017/results/UF.html index 83cc5d4b..7c4cd8bb 100644 --- a/archive/2017/results/UF.html +++ b/archive/2017/results/UF.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -220,7 +220,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/UFBV-ucore.html b/archive/2017/results/UFBV-ucore.html index 99c69ec1..447e884d 100644 --- a/archive/2017/results/UFBV-ucore.html +++ b/archive/2017/results/UFBV-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -171,7 +171,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/UFBV.html b/archive/2017/results/UFBV.html index 91d4a59d..64bd7b34 100644 --- a/archive/2017/results/UFBV.html +++ b/archive/2017/results/UFBV.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -97,7 +97,7 @@

UFBV (Main Track)

Competition results for the UFBV division as of Fri Jul 21 10:18:02 GMT

-

Benchmarks in this division : 200 +

Benchmarks in this division : 200
Time Limit: 1200s

@@ -178,7 +178,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/UFDT.html b/archive/2017/results/UFDT.html index 86f5e0bf..68c3e399 100644 --- a/archive/2017/results/UFDT.html +++ b/archive/2017/results/UFDT.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -175,7 +175,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/UFDTLIA.html b/archive/2017/results/UFDTLIA.html index a23f69bb..b532ca5a 100644 --- a/archive/2017/results/UFDTLIA.html +++ b/archive/2017/results/UFDTLIA.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -97,7 +97,7 @@

UFDTLIA (Main Track)

Competition results for the UFDTLIA division as of Fri Jul 21 10:18:02 GMT

-

Benchmarks in this division : 303 +

Benchmarks in this division : 303
Time Limit: 1200s

@@ -175,7 +175,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/UFIDL-ucore.html b/archive/2017/results/UFIDL-ucore.html index 8e2ff6f1..44d78fa0 100644 --- a/archive/2017/results/UFIDL-ucore.html +++ b/archive/2017/results/UFIDL-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -170,7 +170,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/UFIDL.html b/archive/2017/results/UFIDL.html index 5f0edcee..c00c6201 100644 --- a/archive/2017/results/UFIDL.html +++ b/archive/2017/results/UFIDL.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -219,7 +219,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/UFLIA-ucore.html b/archive/2017/results/UFLIA-ucore.html index e562cc07..2cf538ab 100644 --- a/archive/2017/results/UFLIA-ucore.html +++ b/archive/2017/results/UFLIA-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -169,7 +169,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/UFLIA.html b/archive/2017/results/UFLIA.html index 424cf187..e3247a1c 100644 --- a/archive/2017/results/UFLIA.html +++ b/archive/2017/results/UFLIA.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -97,7 +97,7 @@

UFLIA (Main Track)

Competition results for the UFLIA division as of Fri Jul 21 10:18:02 GMT

-

Benchmarks in this division : 10136 +

Benchmarks in this division : 10136
Time Limit: 1200s

@@ -220,7 +220,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/UFLRA-app.html b/archive/2017/results/UFLRA-app.html index 0d971a96..27a7b050 100644 --- a/archive/2017/results/UFLRA-app.html +++ b/archive/2017/results/UFLRA-app.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -97,7 +97,7 @@

UFLRA (Application Track)

Competition results for the UFLRA division as of Tue Jul 18 22:06:21 GMT

-

Benchmarks in this division : 1870 +

Benchmarks in this division : 1870
Time Limit: 2400s

@@ -147,7 +147,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/UFLRA-ucore.html b/archive/2017/results/UFLRA-ucore.html index 9e4ea30f..b7f16bf7 100644 --- a/archive/2017/results/UFLRA-ucore.html +++ b/archive/2017/results/UFLRA-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -97,7 +97,7 @@

UFLRA (Unsat Core Track)

Competition results for the UFLRA division as of Tue Jul 18 22:06:21 GMT

-

Benchmarks in this division : 10 +

Benchmarks in this division : 10
Time Limit: 2400s

@@ -170,7 +170,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/UFLRA.html b/archive/2017/results/UFLRA.html index 8481ac39..22f9337b 100644 --- a/archive/2017/results/UFLRA.html +++ b/archive/2017/results/UFLRA.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -219,7 +219,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/UFNIA-ucore.html b/archive/2017/results/UFNIA-ucore.html index 1684452d..0e21dbf5 100644 --- a/archive/2017/results/UFNIA-ucore.html +++ b/archive/2017/results/UFNIA-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -170,7 +170,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/UFNIA.html b/archive/2017/results/UFNIA.html index bc7b735f..6fc4cc96 100644 --- a/archive/2017/results/UFNIA.html +++ b/archive/2017/results/UFNIA.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -97,7 +97,7 @@

UFNIA (Main Track)

Competition results for the UFNIA division as of Fri Jul 21 10:18:02 GMT

-

Benchmarks in this division : 3308 +

Benchmarks in this division : 3308
Time Limit: 1200s

@@ -204,7 +204,6 @@

Parallel Performance

- + - diff --git a/archive/2017/results/competition-main.html b/archive/2017/results/competition-main.html index 50ea523a..9dd4a28d 100644 --- a/archive/2017/results/competition-main.html +++ b/archive/2017/results/competition-main.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -348,7 +348,6 @@

Competition-Wide Scoring fo - + - diff --git a/archive/2017/results/summary-app.html b/archive/2017/results/summary-app.html index 0670a489..825235a7 100644 --- a/archive/2017/results/summary-app.html +++ b/archive/2017/results/summary-app.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -269,7 +269,6 @@

Application Track (Summary)

- + - diff --git a/archive/2017/results/summary-main.html b/archive/2017/results/summary-main.html index 16d013cc..40bc898b 100644 --- a/archive/2017/results/summary-main.html +++ b/archive/2017/results/summary-main.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -818,7 +818,6 @@

Main Track (Summary)

- + - diff --git a/archive/2017/results/summary-ucore.html b/archive/2017/results/summary-ucore.html index ad3f829a..155886c9 100644 --- a/archive/2017/results/summary-ucore.html +++ b/archive/2017/results/summary-ucore.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -714,7 +714,6 @@

Unsat Core Track (Summary)

- + - diff --git a/archive/2017/specs.html b/archive/2017/specs.html index 3a516a46..733b2290 100644 --- a/archive/2017/specs.html +++ b/archive/2017/specs.html @@ -34,7 +34,7 @@

SMT-COMP

- +

The International Satisfiability Modulo Theories (SMT) Competition.

@@ -55,7 +55,7 @@

SMT-COMP 2017 Rules
Benchmarks
Tools
Specs
Participants
Results
Slides
Report

- + @@ -115,7 +115,6 @@

Machine Specifications

- + - diff --git a/archive/2017/tools.html b/archive/2017/tools.html index 563a2321..91bc715d 100644 --- a/archive/2017/tools.html +++ b/archive/2017/tools.html @@ -34,7 +34,7 @@

[archive/2017/tools.html: besides the same header and navigation fixes, the remaining
hunks each replace a two-line span next to a release link (payload lost). The surviving
page text reads:]

Tools

Pre-Processor (Benchmark Scrambler)
GitHub Repository · Sources · SMT-COMP 2017 Releases
  • Main Track: binary available on StarExec as SMT-COMP 2017 Scrambler (id: 349)
  • Application Track: for the Application Track, on StarExec, the 2016 benchmark
    scrambler was used; available on StarExec as SMT-COMP 2016 Application Scrambler
    (id: 285)
  • Unsat Core Track: binary available on StarExec as SMT-COMP 2017 Unsat-Core
    Scrambler (id: 350)

Post-Processor
GitHub Repository · Sources · SMT-COMP 2017 Releases
  • Main Track: binary available on StarExec as SMT-COMP 2017 (id: 348)
  • Application Track: for the Application Track, on StarExec, the 2016 post-processor
    was used; available on StarExec as SMT-COMP 2016 Application Track (id: 274)
  • Unsat Core Track: binary available on StarExec as SMT-COMP 2017 Unsat-Core Track
    (id: 351)

Trace executor
All solvers wrapped with the Trace executor are available here.

[archive/2018/benchmarks.html receives the same header and navigation fixes as the pages
above (2018 navigation: SMT-COMP 2018 Rules, Benchmarks, Tools, Specs, Participants,
Results, Slides, Report); the only surviving section heading is "Unsat-Core Track"
(@@ -874,7 +874,6 @@).]

diff --git a/archive/2018/benchmarks.md b/archive/2018/benchmarks.md
index b93941fd..6e41b09f 100644
--- a/archive/2018/benchmarks.md
+++ b/archive/2018/benchmarks.md
@@ -1,7 +1,7 @@

Benchmarks

SMT-COMP 2018 will use a large subset of the benchmarks available within the 2018-05-20
release of [SMT-LIB](http://smtlib.cs.uiowa.edu/), as described in
-the competition rules. 
+the competition rules.
 The SMT-LIB benchmarks are available in space
 [root/SMT/SMT-LIB benchmarks/2018-05-20](https://www.starexec.org/starexec/secure/explore/spaces.jsp?id=294532)

@@ -799,4 +799,3 @@
 QF_SLIA, which contains strings, is experimental in 2018.
 unsat-core track divisions in SMT-COMP 2018.
-
[Both hunks apparently strip trailing whitespace: a trailing space after "the competition
rules." and a trailing line at the end of the file.]

diff --git a/archive/2018/call-for-comments.txt b/archive/2018/call-for-comments.txt
index b181ed2e..15c21d67 100644
--- a/archive/2018/call-for-comments.txt
+++ b/archive/2018/call-for-comments.txt
@@ -33,7 +33,7 @@ committee. The organizing team for SMT-COMP'18 is
 This is a call for three things:
-CALL FOR COMMENTS: 
+CALL FOR COMMENTS:
 The organizing team is preparing the schedule and rules for 2018. Any comments
 you may have to improve the competition over past years or to

[The diffs for the remaining 2018 pages are extraction-damaged in the same way as the
2017 pages above: each file gets the one-line header fix (@@ -34 or -35), the one-line
navigation fix (@@ -55 or -56), and a final hunk dropping one line near the end of the
page body. archive/2018/index.html (surviving heading: "Organizers"),
archive/2018/news.html, and archive/2018/news/2018-03-16.html carry no further
recoverable text; archive/2018/news/2018-04-17.html carries the headline "SMT-COMP 2018
draft rules posted".]

[archive/2018/news/2018-05-21.html: same fixes; surviving headline (typo corrected):
"SMT-COMP 2018 final call for solvers issued".]

[Same fixes for archive/2018/news/2018-06-12.html ("SMT-COMP 2018 competition jobs
started"), archive/2018/news/2018-07-13.html ("SMT-COMP 2018 results presented"),
archive/2018/news/2018-07-14.html ("SMT-COMP 2018 FLoC Olympic Games award
presentation"), archive/2018/participants.html (surviving heading: "Divisions"), and
archive/2018/results.html, whose surviving index text begins: "SMT-COMP 2018 Results —
ABVFP (Main Track); ALIA (Main Track, Application Track, Unsat Core Track); …".]

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -149,7 +149,6 @@

    Parallel Performance

    - + - diff --git a/archive/2018/results/ALIA-app.html b/archive/2018/results/ALIA-app.html index 5af7559f..98ba8520 100644 --- a/archive/2018/results/ALIA-app.html +++ b/archive/2018/results/ALIA-app.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -129,7 +129,6 @@

    Result table1

    - + - diff --git a/archive/2018/results/ALIA-ucore.html b/archive/2018/results/ALIA-ucore.html index 6df7f4fa..05f3d2b1 100644 --- a/archive/2018/results/ALIA-ucore.html +++ b/archive/2018/results/ALIA-ucore.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -144,7 +144,6 @@

    Parallel Performance

    - + - diff --git a/archive/2018/results/ALIA.html b/archive/2018/results/ALIA.html index f921ecdb..64a7683b 100644 --- a/archive/2018/results/ALIA.html +++ b/archive/2018/results/ALIA.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -190,7 +190,6 @@

    Parallel Performance

    - + - diff --git a/archive/2018/results/ANIA-app.html b/archive/2018/results/ANIA-app.html index 031bf20e..c2f5c511 100644 --- a/archive/2018/results/ANIA-app.html +++ b/archive/2018/results/ANIA-app.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -129,7 +129,6 @@

    Result table1

    - + - diff --git a/archive/2018/results/AUFBVDTLIA-ucore.html b/archive/2018/results/AUFBVDTLIA-ucore.html index 36306020..f07904a5 100644 --- a/archive/2018/results/AUFBVDTLIA-ucore.html +++ b/archive/2018/results/AUFBVDTLIA-ucore.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,7 +140,6 @@

    Parallel Performance

    - + - diff --git a/archive/2018/results/AUFBVDTLIA.html b/archive/2018/results/AUFBVDTLIA.html index e8f23c86..b9112154 100644 --- a/archive/2018/results/AUFBVDTLIA.html +++ b/archive/2018/results/AUFBVDTLIA.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -149,7 +149,6 @@

    Parallel Performance

    - + - diff --git a/archive/2018/results/AUFDTLIA.html b/archive/2018/results/AUFDTLIA.html index 02295cab..aef622f0 100644 --- a/archive/2018/results/AUFDTLIA.html +++ b/archive/2018/results/AUFDTLIA.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -163,7 +163,6 @@

    Parallel Performance

    - + - diff --git a/archive/2018/results/AUFLIA-ucore.html b/archive/2018/results/AUFLIA-ucore.html index 0d52d6ce..7c957a9e 100644 --- a/archive/2018/results/AUFLIA-ucore.html +++ b/archive/2018/results/AUFLIA-ucore.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -144,7 +144,6 @@

    Parallel Performance

    - + - diff --git a/archive/2018/results/AUFLIA.html b/archive/2018/results/AUFLIA.html index a4cb0163..cd98f3a1 100644 --- a/archive/2018/results/AUFLIA.html +++ b/archive/2018/results/AUFLIA.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -190,7 +190,6 @@

    Parallel Performance

    - + - diff --git a/archive/2018/results/AUFLIRA-ucore.html b/archive/2018/results/AUFLIRA-ucore.html index 49a50ce6..a0fd2c88 100644 --- a/archive/2018/results/AUFLIRA-ucore.html +++ b/archive/2018/results/AUFLIRA-ucore.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -144,7 +144,6 @@

    Parallel Performance

    - + - diff --git a/archive/2018/results/AUFLIRA.html b/archive/2018/results/AUFLIRA.html index e6f8cb7c..97f83013 100644 --- a/archive/2018/results/AUFLIRA.html +++ b/archive/2018/results/AUFLIRA.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -190,7 +190,6 @@

    Parallel Performance

    - + - diff --git a/archive/2018/results/AUFNIRA-app.html b/archive/2018/results/AUFNIRA-app.html index ab2ca572..0c2a3b2b 100644 --- a/archive/2018/results/AUFNIRA-app.html +++ b/archive/2018/results/AUFNIRA-app.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -129,7 +129,6 @@

    Result table1

    - + - diff --git a/archive/2018/results/AUFNIRA-ucore.html b/archive/2018/results/AUFNIRA-ucore.html index c34cc3d5..bff64f16 100644 --- a/archive/2018/results/AUFNIRA-ucore.html +++ b/archive/2018/results/AUFNIRA-ucore.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -144,7 +144,6 @@

    Parallel Performance

    - + - diff --git a/archive/2018/results/AUFNIRA.html b/archive/2018/results/AUFNIRA.html index 760618c5..4b6720c1 100644 --- a/archive/2018/results/AUFNIRA.html +++ b/archive/2018/results/AUFNIRA.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -181,7 +181,6 @@

    Parallel Performance

    - + - diff --git a/archive/2018/results/BV-app.html b/archive/2018/results/BV-app.html index 21660455..05635a63 100644 --- a/archive/2018/results/BV-app.html +++ b/archive/2018/results/BV-app.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -129,7 +129,6 @@

    Result table1

    - + - diff --git a/archive/2018/results/BV-ucore.html b/archive/2018/results/BV-ucore.html index 9b71aaa7..e8398652 100644 --- a/archive/2018/results/BV-ucore.html +++ b/archive/2018/results/BV-ucore.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -144,7 +144,6 @@

    Parallel Performance

    - + - diff --git a/archive/2018/results/BV.html b/archive/2018/results/BV.html index fac3fe14..f8df2013 100644 --- a/archive/2018/results/BV.html +++ b/archive/2018/results/BV.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -181,7 +181,6 @@

    Parallel Performance

    - + - diff --git a/archive/2018/results/BVFP.html b/archive/2018/results/BVFP.html index e4685c44..9a594666 100644 --- a/archive/2018/results/BVFP.html +++ b/archive/2018/results/BVFP.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2018 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -149,7 +149,6 @@

    Parallel Performance

diff --git a/archive/2018/results/FP.html b/archive/2018/results/FP.html
index 04916f90..48e5830f 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -149,7 +149,6 @@ Parallel Performance
diff --git a/archive/2018/results/LIA-app.html b/archive/2018/results/LIA-app.html
index 1137281f..c5dd9b99 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -129,7 +129,6 @@ Result table1
diff --git a/archive/2018/results/LIA-ucore.html b/archive/2018/results/LIA-ucore.html
index 8237f99f..ab8ab647 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/LIA.html b/archive/2018/results/LIA.html
index fcf7bf43..85089a8d 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -181,7 +181,6 @@ Parallel Performance
diff --git a/archive/2018/results/LRA-ucore.html b/archive/2018/results/LRA-ucore.html
index 70583427..2dcc6376 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/LRA.html b/archive/2018/results/LRA.html
index f99e3f9a..0e974f78 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -172,7 +172,6 @@ Parallel Performance
diff --git a/archive/2018/results/NIA-ucore.html b/archive/2018/results/NIA-ucore.html
index 3d45d334..55f6a817 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/NIA.html b/archive/2018/results/NIA.html
index b48ba84c..3ac7173a 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -172,7 +172,6 @@ Parallel Performance
diff --git a/archive/2018/results/NRA-ucore.html b/archive/2018/results/NRA-ucore.html
index 6d2d3b17..4a2467f8 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/NRA.html b/archive/2018/results/NRA.html
index 9585f3ca..05d41750 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -172,7 +172,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_ABV-app.html b/archive/2018/results/QF_ABV-app.html
index bc62636f..60f2626d 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -133,7 +133,6 @@ Result table1
diff --git a/archive/2018/results/QF_ABV-ucore.html b/archive/2018/results/QF_ABV-ucore.html
index 70c3e8fe..4af00efe 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -160,7 +160,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_ABV.html b/archive/2018/results/QF_ABV.html
index fa4952cf..c36b1cc3 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -190,7 +190,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_ABVFP-ucore.html b/archive/2018/results/QF_ABVFP-ucore.html
index 02554068..9995aeb4 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -140,7 +140,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_ABVFP.html b/archive/2018/results/QF_ABVFP.html
index a5c2f420..8719b7ed 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -163,7 +163,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_ALIA-app.html b/archive/2018/results/QF_ALIA-app.html
index 57086f71..065c0e3c 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -135,7 +135,6 @@ Result table1
diff --git a/archive/2018/results/QF_ALIA-ucore.html b/archive/2018/results/QF_ALIA-ucore.html
index 4013036a..ba6cb111 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -163,7 +163,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_ALIA.html b/archive/2018/results/QF_ALIA.html
index af70c5ef..c7d24699 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -199,7 +199,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_ANIA-app.html b/archive/2018/results/QF_ANIA-app.html
index 87bb7c74..8d60fe7f 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -129,7 +129,6 @@ Result table1
diff --git a/archive/2018/results/QF_ANIA-ucore.html b/archive/2018/results/QF_ANIA-ucore.html
index 93dcdd5a..74806fc3 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_ANIA.html b/archive/2018/results/QF_ANIA.html
index 8ef3843b..6f1d5451 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -158,7 +158,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_AUFBV-app.html b/archive/2018/results/QF_AUFBV-app.html
index db3e9caf..0e2bf1fb 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -131,7 +131,6 @@ Result table1
diff --git a/archive/2018/results/QF_AUFBV-ucore.html b/archive/2018/results/QF_AUFBV-ucore.html
index a760ef82..d2fe6466 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -159,7 +159,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_AUFBV.html b/archive/2018/results/QF_AUFBV.html
index 3f827de0..44c72fdd 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -190,7 +190,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_AUFLIA-app.html b/archive/2018/results/QF_AUFLIA-app.html
index 800494b4..366e9b15 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -135,7 +135,6 @@ Result table1
diff --git a/archive/2018/results/QF_AUFLIA-ucore.html b/archive/2018/results/QF_AUFLIA-ucore.html
index 9a942819..9f8bd5b8 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -163,7 +163,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_AUFLIA.html b/archive/2018/results/QF_AUFLIA.html
index 79d1f671..a441fb60 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -199,7 +199,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_AUFNIA-ucore.html b/archive/2018/results/QF_AUFNIA-ucore.html
index 3248e411..8af6265a 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_AUFNIA.html b/archive/2018/results/QF_AUFNIA.html
index 11310d79..f09bd65f 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -158,7 +158,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_AX-ucore.html b/archive/2018/results/QF_AX-ucore.html
index b3169b90..7824250d 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -163,7 +163,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_AX.html b/archive/2018/results/QF_AX.html
index 6e41250c..84dd16e3 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -190,7 +190,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_BV-app.html b/archive/2018/results/QF_BV-app.html
index 5b9f633c..888accaa 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -135,7 +135,6 @@ Result table1
diff --git a/archive/2018/results/QF_BV-ucore.html b/archive/2018/results/QF_BV-ucore.html
index ba9f40d2..7507e92d 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -159,7 +159,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_BV.html b/archive/2018/results/QF_BV.html
index 8bf21882..1f500d48 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -235,7 +235,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_BVFP-app.html b/archive/2018/results/QF_BVFP-app.html
index dbf6c4eb..9bbde9ea 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -129,7 +129,6 @@ Result table1
diff --git a/archive/2018/results/QF_BVFP-ucore.html b/archive/2018/results/QF_BVFP-ucore.html
index 80949f0a..2ce8a326 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_BVFP.html b/archive/2018/results/QF_BVFP.html
index 4b179bca..942dd35f 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -172,7 +172,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_DT-ucore.html b/archive/2018/results/QF_DT-ucore.html
index 2f54d01f..876c9c0f 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_DT.html b/archive/2018/results/QF_DT.html
index d57ff57e..bd3a40ec 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -158,7 +158,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_FP-app.html b/archive/2018/results/QF_FP-app.html
index 4d7a61a0..f4516e43 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -129,7 +129,6 @@ Result table1
diff --git a/archive/2018/results/QF_FP-ucore.html b/archive/2018/results/QF_FP-ucore.html
index 3b48134a..e4ad090b 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_FP.html b/archive/2018/results/QF_FP.html
index 34f7fd42..3e8f16a1 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -172,7 +172,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_IDL-ucore.html b/archive/2018/results/QF_IDL-ucore.html
index b60b1d58..1c303d8b 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -163,7 +163,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_IDL.html b/archive/2018/results/QF_IDL.html
index c16c7899..af9991ed 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -217,7 +217,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_LIA-app.html b/archive/2018/results/QF_LIA-app.html
index 0c1fb1e3..5bb5939f 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -135,7 +135,6 @@ Result table1
diff --git a/archive/2018/results/QF_LIA-ucore.html b/archive/2018/results/QF_LIA-ucore.html
index 18ff0906..6b8b01ca 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -163,7 +163,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_LIA.html b/archive/2018/results/QF_LIA.html
index cbb50c39..c8d747a2 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -226,7 +226,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_LIRA-ucore.html b/archive/2018/results/QF_LIRA-ucore.html
index 2d64576a..30f5b808 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -159,7 +159,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_LIRA.html b/archive/2018/results/QF_LIRA.html
index 032d6562..057cbb6c 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -190,7 +190,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_LRA-app.html b/archive/2018/results/QF_LRA-app.html
index a88d9103..d02ed196 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -135,7 +135,6 @@ Result table1
diff --git a/archive/2018/results/QF_LRA-ucore.html b/archive/2018/results/QF_LRA-ucore.html
index 9529b8e2..3113794f 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -163,7 +163,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_LRA.html b/archive/2018/results/QF_LRA.html
index cbb129ad..1d32910c 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -244,7 +244,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_NIA-app.html b/archive/2018/results/QF_NIA-app.html
index 9da2d2ef..be4161d7 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -131,7 +131,6 @@ Result table1
diff --git a/archive/2018/results/QF_NIA-ucore.html b/archive/2018/results/QF_NIA-ucore.html
index d44cc33b..e8df882b 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_NIA.html b/archive/2018/results/QF_NIA.html
index 59fe58a2..80c38baf 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -190,7 +190,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_NIRA-ucore.html b/archive/2018/results/QF_NIRA-ucore.html
index ad57ee7a..3cc38a87 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_NIRA.html b/archive/2018/results/QF_NIRA.html
index 27d4b29d..271863ba 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -181,7 +181,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_NRA-ucore.html b/archive/2018/results/QF_NRA-ucore.html
index 56417af5..1786f093 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_NRA.html b/archive/2018/results/QF_NRA.html
index 2c2bf7f2..6e5a534d 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -199,7 +199,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_RDL-ucore.html b/archive/2018/results/QF_RDL-ucore.html
index afa6ca59..8d154bc8 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -163,7 +163,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_RDL.html b/archive/2018/results/QF_RDL.html
index 700856df..8cef901c 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -217,7 +217,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_SLIA.html b/archive/2018/results/QF_SLIA.html
index b45f827d..971737ec 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -149,7 +149,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_UF-ucore.html b/archive/2018/results/QF_UF-ucore.html
index de4c32d8..586d6bdd 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -163,7 +163,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_UF.html b/archive/2018/results/QF_UF.html
index 32a50029..b1e411d8 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -208,7 +208,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_UFBV-app.html b/archive/2018/results/QF_UFBV-app.html
index cc6ddfb6..8dcfbc36 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -133,7 +133,6 @@ Result table1
diff --git a/archive/2018/results/QF_UFBV-ucore.html b/archive/2018/results/QF_UFBV-ucore.html
index 817c7c36..e53408de 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -159,7 +159,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_UFBV.html b/archive/2018/results/QF_UFBV.html
index 1f2c9658..edd6483a 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -190,7 +190,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_UFIDL-ucore.html b/archive/2018/results/QF_UFIDL-ucore.html
index 6686db0c..62f4f4d9 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -163,7 +163,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_UFIDL.html b/archive/2018/results/QF_UFIDL.html
index 911cc926..21277e78 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -199,7 +199,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_UFLIA-app.html b/archive/2018/results/QF_UFLIA-app.html
index e7af5e4d..e577f74f 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -135,7 +135,6 @@ Result table1
diff --git a/archive/2018/results/QF_UFLIA-ucore.html b/archive/2018/results/QF_UFLIA-ucore.html
index 7cae8368..196209e9 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -163,7 +163,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_UFLIA.html b/archive/2018/results/QF_UFLIA.html
index ad13e79a..6d792c6a 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -199,7 +199,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_UFLRA-app.html b/archive/2018/results/QF_UFLRA-app.html
index e0441e64..bf316754 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -135,7 +135,6 @@ Result table1
diff --git a/archive/2018/results/QF_UFLRA-ucore.html b/archive/2018/results/QF_UFLRA-ucore.html
index bfb06c1f..1a1ce186 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -163,7 +163,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_UFLRA.html b/archive/2018/results/QF_UFLRA.html
index 427d1200..d51dd815 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -199,7 +199,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_UFNIA-app.html b/archive/2018/results/QF_UFNIA-app.html
index b21ef105..601435e6 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -131,7 +131,6 @@ Result table1
diff --git a/archive/2018/results/QF_UFNIA-ucore.html b/archive/2018/results/QF_UFNIA-ucore.html
index 6f58ccb5..330731af 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_UFNIA.html b/archive/2018/results/QF_UFNIA.html
index a924e4ff..c7593560 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -172,7 +172,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_UFNRA-ucore.html b/archive/2018/results/QF_UFNRA-ucore.html
index 9fe1a457..848106c3 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/QF_UFNRA.html b/archive/2018/results/QF_UFNRA.html
index c65a749f..4fa3b752 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -181,7 +181,6 @@ Parallel Performance
diff --git a/archive/2018/results/UF-ucore.html b/archive/2018/results/UF-ucore.html
index cdb96071..393374bc 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/UF.html b/archive/2018/results/UF.html
index c7f66a8f..249c49eb 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -181,7 +181,6 @@ Parallel Performance
diff --git a/archive/2018/results/UFBV-ucore.html b/archive/2018/results/UFBV-ucore.html
index aff60d61..6106e8ae 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/UFBV.html b/archive/2018/results/UFBV.html
index 0bcb5b5b..8c6c8d9e 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -158,7 +158,6 @@ Parallel Performance
diff --git a/archive/2018/results/UFDT-ucore.html b/archive/2018/results/UFDT-ucore.html
index 9e7efb02..36b27c32 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -140,7 +140,6 @@ Parallel Performance
diff --git a/archive/2018/results/UFDT.html b/archive/2018/results/UFDT.html
index b2f77372..8633290a 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -163,7 +163,6 @@ Parallel Performance
diff --git a/archive/2018/results/UFDTLIA.html b/archive/2018/results/UFDTLIA.html
index 780b746a..3fb7c386 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -163,7 +163,6 @@ Parallel Performance
diff --git a/archive/2018/results/UFIDL-ucore.html b/archive/2018/results/UFIDL-ucore.html
index 5ac29220..2cd8dae1 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/UFIDL.html b/archive/2018/results/UFIDL.html
index 0694ea38..8dd4af27 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -181,7 +181,6 @@ Parallel Performance
diff --git a/archive/2018/results/UFLIA-ucore.html b/archive/2018/results/UFLIA-ucore.html
index 605e4716..06e43aad 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/UFLIA.html b/archive/2018/results/UFLIA.html
index 0da7d519..23a81ed8 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -181,7 +181,6 @@ Parallel Performance
diff --git a/archive/2018/results/UFLRA-app.html b/archive/2018/results/UFLRA-app.html
index 5865de7d..610ca721 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -129,7 +129,6 @@ Result table1
diff --git a/archive/2018/results/UFLRA-ucore.html b/archive/2018/results/UFLRA-ucore.html
index 3edeb25c..1bccbbad 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/UFLRA.html b/archive/2018/results/UFLRA.html
index cb7612c1..09e263ba 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -181,7 +181,6 @@ Parallel Performance
diff --git a/archive/2018/results/UFNIA-ucore.html b/archive/2018/results/UFNIA-ucore.html
index 47ddb03f..03c05f76 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -144,7 +144,6 @@ Parallel Performance
diff --git a/archive/2018/results/UFNIA.html b/archive/2018/results/UFNIA.html
index 73b772bd..f920275b 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -172,7 +172,6 @@ Parallel Performance
diff --git a/archive/2018/results/competition-main.html b/archive/2018/results/competition-main.html
index 9c315edd..44901c9c 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -177,7 +177,6 @@ Competition-Wide Scoring fo
diff --git a/archive/2018/results/summary-app.html b/archive/2018/results/summary-app.html
index afb25ac4..d38ef9ca 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -297,7 +297,6 @@ Application Track (Summary)
diff --git a/archive/2018/results/summary-main.html b/archive/2018/results/summary-main.html
index 4ad21139..e3fe7078 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -903,7 +903,6 @@ Main Track (Summary)
diff --git a/archive/2018/results/summary-ucore.html b/archive/2018/results/summary-ucore.html
index 9fefaa61..fd1169a1 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -731,7 +731,6 @@ Unsat Core Track (Summary)
diff --git a/archive/2018/slides.html b/archive/2018/slides.html
index 9ddb36dd..e4a722bd 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -81,7 +81,6 @@ FLoC Olympic Games
diff --git a/archive/2018/specs.html b/archive/2018/specs.html
index b40f4d28..1a3d96ba 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -115,7 +115,6 @@ Machine Specifications
diff --git a/archive/2018/tools.html b/archive/2018/tools.html
index 4b67d006..fed12c18 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2018 Rules
@@ -70,8 +70,8 @@ Tools: Pre-Processor (Benchmark Scrambler): GitHub Repository, Sources, SMT-COMP 2018 Releases
@@ -79,8 +79,8 @@ Main Track: Binary, available on StarExec as SMT-COMP 2018 Scrambler (id: 379)
@@ -88,8 +88,8 @@ Application Track: Binary, available on StarExec as SMT-COMP 2018 Application Scrambler (id: 380)
@@ -97,15 +97,15 @@ Unsat Core Track: Binary, available on StarExec as SMT-COMP 2018 Unsat-Core Scrambler (id: 381)
@@ -113,8 +113,8 @@ Post-Processor: GitHub Repository, Sources, SMT-COMP 2018 Releases; Main Track: Binary, available on StarExec as SMT-COMP 2018 (id: 376)
@@ -122,8 +122,8 @@ Application Track: Binary, available on StarExec as SMT-COMP 2018 Application Track (id: 377)
@@ -131,8 +131,8 @@ Unsat Core Track: Binary, available on StarExec as SMT-COMP 2018 Unsat-Core Track (id: 389)
@@ -140,7 +140,7 @@ Trace executor: GitHub Repository, Sources, Binary; all solvers wrapped with the Trace executor are available here
@@ -153,7 +153,6 @@ Trace executor
diff --git a/archive/2019/benchmarks.html b/archive/2019/benchmarks.html
index d18c6dc1..ef08989a 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2019 Rules
@@ -596,7 +596,6 @@ Benchmarks
diff --git a/archive/2019/divisions/abvfp.html b/archive/2019/divisions/abvfp.html
index bed5dfd6..cb6f16db 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -95,7 +95,6 @@ Tracks
diff --git a/archive/2019/divisions/alia.html b/archive/2019/divisions/alia.html
index d11dd863..591ae4de 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -95,7 +95,6 @@ Tracks
diff --git a/archive/2019/divisions/ania.html b/archive/2019/divisions/ania.html
index c44c42d8..fa3f27b3 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -95,7 +95,6 @@ Tracks
diff --git a/archive/2019/divisions/aufbvdtlia.html b/archive/2019/divisions/aufbvdtlia.html
index 68bf8bde..06ab575f 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -102,7 +102,6 @@ Notes
diff --git a/archive/2019/divisions/aufdtlia.html b/archive/2019/divisions/aufdtlia.html
index 8934b229..bb884f62 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -99,7 +99,6 @@ Notes
diff --git a/archive/2019/divisions/auflia.html b/archive/2019/divisions/auflia.html
index 7507dd2a..705a71ff 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -95,7 +95,6 @@ Tracks
diff --git a/archive/2019/divisions/auflira.html b/archive/2019/divisions/auflira.html
index 3db3592f..5719d15e 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -95,7 +95,6 @@ Tracks
diff --git a/archive/2019/divisions/aufnia.html b/archive/2019/divisions/aufnia.html
index da653a06..39e70b08 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -95,7 +95,6 @@ Tracks
diff --git a/archive/2019/divisions/aufnira.html b/archive/2019/divisions/aufnira.html
index f3f491bb..3a63baf6 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -95,7 +95,6 @@ Tracks
diff --git a/archive/2019/divisions/bv.html b/archive/2019/divisions/bv.html
index 67df6099..9577dfa1 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -95,7 +95,6 @@ Tracks
diff --git a/archive/2019/divisions/bvfp.html b/archive/2019/divisions/bvfp.html
index 5cb2fb60..4db19ec3 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -95,7 +95,6 @@ Tracks
diff --git a/archive/2019/divisions/fp.html b/archive/2019/divisions/fp.html
index db5b7919..f366ea62 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -95,7 +95,6 @@ Tracks
diff --git a/archive/2019/divisions/lia.html b/archive/2019/divisions/lia.html
index adc8a503..2bdd1c9c 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -95,7 +95,6 @@ Tracks
diff --git a/archive/2019/divisions/lra.html b/archive/2019/divisions/lra.html
index 38a5e0a8..53a871e3 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -95,7 +95,6 @@ Tracks
diff --git a/archive/2019/divisions/nia.html b/archive/2019/divisions/nia.html
index 07b10823..8f4bf559 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -95,7 +95,6 @@ Tracks
diff --git a/archive/2019/divisions/nra.html b/archive/2019/divisions/nra.html
index d0dda0cf..aeb6635e 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -95,7 +95,6 @@ Tracks
diff --git a/archive/2019/divisions/qf-abv.html b/archive/2019/divisions/qf-abv.html
index d77fe43f..794826f6 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -95,7 +95,6 @@ Tracks
diff --git a/archive/2019/divisions/qf-abvfp.html b/archive/2019/divisions/qf-abvfp.html
index 8c656456..a68bc124 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -103,7 +103,6 @@ Notes
    - + - diff --git a/archive/2019/divisions/qf-alia.html b/archive/2019/divisions/qf-alia.html index 6317c169..79f2d7ba 100644 --- a/archive/2019/divisions/qf-alia.html +++ b/archive/2019/divisions/qf-alia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-ania.html b/archive/2019/divisions/qf-ania.html index 170574c7..f354d735 100644 --- a/archive/2019/divisions/qf-ania.html +++ b/archive/2019/divisions/qf-ania.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-aufbv.html b/archive/2019/divisions/qf-aufbv.html index 3932ce60..3b86f2e7 100644 --- a/archive/2019/divisions/qf-aufbv.html +++ b/archive/2019/divisions/qf-aufbv.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-aufbvlia.html b/archive/2019/divisions/qf-aufbvlia.html index 686bc03c..5dbe1083 100644 --- a/archive/2019/divisions/qf-aufbvlia.html +++ b/archive/2019/divisions/qf-aufbvlia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -101,7 +101,6 @@

    Notes

    - + - diff --git a/archive/2019/divisions/qf-aufbvnia.html b/archive/2019/divisions/qf-aufbvnia.html index 4aedd01c..93a9199e 100644 --- a/archive/2019/divisions/qf-aufbvnia.html +++ b/archive/2019/divisions/qf-aufbvnia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-auflia.html b/archive/2019/divisions/qf-auflia.html index 12af22f0..c2eac47b 100644 --- a/archive/2019/divisions/qf-auflia.html +++ b/archive/2019/divisions/qf-auflia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-aufnia.html b/archive/2019/divisions/qf-aufnia.html index 25abb7f8..4e55375a 100644 --- a/archive/2019/divisions/qf-aufnia.html +++ b/archive/2019/divisions/qf-aufnia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-ax.html b/archive/2019/divisions/qf-ax.html index bdece94b..be07451d 100644 --- a/archive/2019/divisions/qf-ax.html +++ b/archive/2019/divisions/qf-ax.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-bv.html b/archive/2019/divisions/qf-bv.html index 69eee63b..fdf6ee3d 100644 --- a/archive/2019/divisions/qf-bv.html +++ b/archive/2019/divisions/qf-bv.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-bvfp.html b/archive/2019/divisions/qf-bvfp.html index 6f1afd63..7c9b7f95 100644 --- a/archive/2019/divisions/qf-bvfp.html +++ b/archive/2019/divisions/qf-bvfp.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -102,7 +102,6 @@

    Notes

    - + - diff --git a/archive/2019/divisions/qf-bvfplra.html b/archive/2019/divisions/qf-bvfplra.html index 3bfd7da2..7f522f4b 100644 --- a/archive/2019/divisions/qf-bvfplra.html +++ b/archive/2019/divisions/qf-bvfplra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -102,7 +102,6 @@

    Notes

    - + - diff --git a/archive/2019/divisions/qf-dt.html b/archive/2019/divisions/qf-dt.html index 9cd0fbd5..ececc821 100644 --- a/archive/2019/divisions/qf-dt.html +++ b/archive/2019/divisions/qf-dt.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -101,7 +101,6 @@

    Notes

    - + - diff --git a/archive/2019/divisions/qf-fp.html b/archive/2019/divisions/qf-fp.html index 3313071a..0d6ec3a5 100644 --- a/archive/2019/divisions/qf-fp.html +++ b/archive/2019/divisions/qf-fp.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -111,7 +111,6 @@

    Notes

    - + - diff --git a/archive/2019/divisions/qf-fplra.html b/archive/2019/divisions/qf-fplra.html index 4be9ef48..ffbb7af1 100644 --- a/archive/2019/divisions/qf-fplra.html +++ b/archive/2019/divisions/qf-fplra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -102,7 +102,6 @@

    Notes

    - + - diff --git a/archive/2019/divisions/qf-idl.html b/archive/2019/divisions/qf-idl.html index 82311060..4acaba1c 100644 --- a/archive/2019/divisions/qf-idl.html +++ b/archive/2019/divisions/qf-idl.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-lia.html b/archive/2019/divisions/qf-lia.html index 12e47233..732d08e8 100644 --- a/archive/2019/divisions/qf-lia.html +++ b/archive/2019/divisions/qf-lia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-lira.html b/archive/2019/divisions/qf-lira.html index ee136614..b255d530 100644 --- a/archive/2019/divisions/qf-lira.html +++ b/archive/2019/divisions/qf-lira.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-lra.html b/archive/2019/divisions/qf-lra.html index 47bc384c..a95da47c 100644 --- a/archive/2019/divisions/qf-lra.html +++ b/archive/2019/divisions/qf-lra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-nia.html b/archive/2019/divisions/qf-nia.html index a515c2d6..66693aa5 100644 --- a/archive/2019/divisions/qf-nia.html +++ b/archive/2019/divisions/qf-nia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-nira.html b/archive/2019/divisions/qf-nira.html index 959b9a06..fc747241 100644 --- a/archive/2019/divisions/qf-nira.html +++ b/archive/2019/divisions/qf-nira.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-nra.html b/archive/2019/divisions/qf-nra.html index db0f642b..93d756d4 100644 --- a/archive/2019/divisions/qf-nra.html +++ b/archive/2019/divisions/qf-nra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-rdl.html b/archive/2019/divisions/qf-rdl.html index c13c964c..03a62cc1 100644 --- a/archive/2019/divisions/qf-rdl.html +++ b/archive/2019/divisions/qf-rdl.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-s.html b/archive/2019/divisions/qf-s.html index a887589d..2829ec37 100644 --- a/archive/2019/divisions/qf-s.html +++ b/archive/2019/divisions/qf-s.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -100,7 +100,6 @@

    Notes

    - + - diff --git a/archive/2019/divisions/qf-slia.html b/archive/2019/divisions/qf-slia.html index 0ec9d15d..19c18ca6 100644 --- a/archive/2019/divisions/qf-slia.html +++ b/archive/2019/divisions/qf-slia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -102,7 +102,6 @@

    Notes

    - + - diff --git a/archive/2019/divisions/qf-uf.html b/archive/2019/divisions/qf-uf.html index 957430fe..28278cf6 100644 --- a/archive/2019/divisions/qf-uf.html +++ b/archive/2019/divisions/qf-uf.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-ufbv.html b/archive/2019/divisions/qf-ufbv.html index 19fe5fb2..59efdf49 100644 --- a/archive/2019/divisions/qf-ufbv.html +++ b/archive/2019/divisions/qf-ufbv.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-ufbvlia.html b/archive/2019/divisions/qf-ufbvlia.html index 12b2e816..342d8dc0 100644 --- a/archive/2019/divisions/qf-ufbvlia.html +++ b/archive/2019/divisions/qf-ufbvlia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -101,7 +101,6 @@

    Notes

    - + - diff --git a/archive/2019/divisions/qf-ufidl.html b/archive/2019/divisions/qf-ufidl.html index 2deb65da..1ea96e69 100644 --- a/archive/2019/divisions/qf-ufidl.html +++ b/archive/2019/divisions/qf-ufidl.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-uflia.html b/archive/2019/divisions/qf-uflia.html index e6a48f65..d9cbe96e 100644 --- a/archive/2019/divisions/qf-uflia.html +++ b/archive/2019/divisions/qf-uflia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-uflra.html b/archive/2019/divisions/qf-uflra.html index 771c35aa..2ea34dc4 100644 --- a/archive/2019/divisions/qf-uflra.html +++ b/archive/2019/divisions/qf-uflra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-ufnia.html b/archive/2019/divisions/qf-ufnia.html index 773a0126..d3285fa8 100644 --- a/archive/2019/divisions/qf-ufnia.html +++ b/archive/2019/divisions/qf-ufnia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/qf-ufnra.html b/archive/2019/divisions/qf-ufnra.html index 2a3254d1..e97d9cff 100644 --- a/archive/2019/divisions/qf-ufnra.html +++ b/archive/2019/divisions/qf-ufnra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/uf.html b/archive/2019/divisions/uf.html index 299bbd80..e8da8844 100644 --- a/archive/2019/divisions/uf.html +++ b/archive/2019/divisions/uf.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/ufbv.html b/archive/2019/divisions/ufbv.html index 81113e98..b9e007c9 100644 --- a/archive/2019/divisions/ufbv.html +++ b/archive/2019/divisions/ufbv.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/ufdt.html b/archive/2019/divisions/ufdt.html index 38e1a8ed..d9a8743b 100644 --- a/archive/2019/divisions/ufdt.html +++ b/archive/2019/divisions/ufdt.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -101,7 +101,6 @@

    Notes

    - + - diff --git a/archive/2019/divisions/ufdtlia.html b/archive/2019/divisions/ufdtlia.html index 508d2703..ad201ee5 100644 --- a/archive/2019/divisions/ufdtlia.html +++ b/archive/2019/divisions/ufdtlia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -99,7 +99,6 @@

    Notes

    - + - diff --git a/archive/2019/divisions/ufdtnia.html b/archive/2019/divisions/ufdtnia.html index 77580b54..6a44aca5 100644 --- a/archive/2019/divisions/ufdtnia.html +++ b/archive/2019/divisions/ufdtnia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -99,7 +99,6 @@

    Notes

    - + - diff --git a/archive/2019/divisions/ufidl.html b/archive/2019/divisions/ufidl.html index 73961b19..b5b89907 100644 --- a/archive/2019/divisions/ufidl.html +++ b/archive/2019/divisions/ufidl.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/uflia.html b/archive/2019/divisions/uflia.html index c2e182ac..1119f820 100644 --- a/archive/2019/divisions/uflia.html +++ b/archive/2019/divisions/uflia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/uflra.html b/archive/2019/divisions/uflra.html index 858cc58d..a727143f 100644 --- a/archive/2019/divisions/uflra.html +++ b/archive/2019/divisions/uflra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

    - + - diff --git a/archive/2019/divisions/ufnia.html b/archive/2019/divisions/ufnia.html index e399187b..c0bdaead 100644 --- a/archive/2019/divisions/ufnia.html +++ b/archive/2019/divisions/ufnia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Tracks

[Six diffs to the 2019 archive's top-level and news pages, following the same
pattern: one header line and one line after the navigation links replaced, then
one line replaced and one removed at the third hunk; the nearby heading is
given in parentheses where it survives.]

archive/2019/index.html            index d9e643cd..ca4cbdce   @@ -34,7 +34,7 @@  @@ -55,7 +55,7 @@  @@ -128,7 +128,6 @@ (Acknowledgment)
archive/2019/news.html             index e5012bc6..774ddec9   @@ -34,7 +34,7 @@  @@ -55,7 +55,7 @@  @@ -82,7 +82,6 @@
archive/2019/news/2019-01-24.html  index 5ab85335..dcc95dee   @@ -35,7 +35,7 @@  @@ -56,7 +56,7 @@  @@ -412,7 +412,6 @@ (COMMUNICATION:)
archive/2019/news/2019-04-19.html  index bcd21e4e..c4c5ece5   @@ -35,7 +35,7 @@  @@ -56,7 +56,7 @@  @@ -81,7 +81,6 @@ (SMT-COMP 2019 rules draft available)
archive/2019/news/2019-06-06.html  index 21271f88..e28b86b8   @@ -35,7 +35,7 @@  @@ -56,7 +56,7 @@  @@ -80,7 +80,6 @@ (SMT-COMP 2019 started running)
archive/2019/news/2019-07-07.html  index 2ba4e0a1..fc51424c   @@ -35,7 +35,7 @@  @@ -56,7 +56,7 @@  @@ -81,7 +81,6 @@ (SMT-COMP 2019 results available)
    - + - diff --git a/archive/2019/participants.html b/archive/2019/participants.html index 3ef13ba5..3a9d3818 100644 --- a/archive/2019/participants.html +++ b/archive/2019/participants.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -5394,7 +5394,6 @@

    UFNIA

    - + - diff --git a/archive/2019/participants/2018-boolector-incremental.html b/archive/2019/participants/2018-boolector-incremental.html index 2178f1ef..1f9d9619 100644 --- a/archive/2019/participants/2018-boolector-incremental.html +++ b/archive/2019/participants/2018-boolector-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-Boolector (incremental)

    - + - - diff --git a/archive/2019/participants/2018-boolector.html b/archive/2019/participants/2018-boolector.html index 47616256..301dd741 100644 --- a/archive/2019/participants/2018-boolector.html +++ b/archive/2019/participants/2018-boolector.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-Boolector

    - + - - diff --git a/archive/2019/participants/2018-colibri.html b/archive/2019/participants/2018-colibri.html index 5d35dabf..517eb8c4 100644 --- a/archive/2019/participants/2018-colibri.html +++ b/archive/2019/participants/2018-colibri.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-COLIBRI

    - + - - diff --git a/archive/2019/participants/2018-cvc4-incremental.html b/archive/2019/participants/2018-cvc4-incremental.html index f184f846..006577b8 100644 --- a/archive/2019/participants/2018-cvc4-incremental.html +++ b/archive/2019/participants/2018-cvc4-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-CVC4 (incremental)

    - + - - diff --git a/archive/2019/participants/2018-cvc4-unsat-core.html b/archive/2019/participants/2018-cvc4-unsat-core.html index d89d1b7a..91df35bf 100644 --- a/archive/2019/participants/2018-cvc4-unsat-core.html +++ b/archive/2019/participants/2018-cvc4-unsat-core.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-CVC4 (unsat core)

    - + - - diff --git a/archive/2019/participants/2018-cvc4.html b/archive/2019/participants/2018-cvc4.html index 02b973b9..27932dbc 100644 --- a/archive/2019/participants/2018-cvc4.html +++ b/archive/2019/participants/2018-cvc4.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-CVC4

    - + - - diff --git a/archive/2019/participants/2018-mathsat-incremental.html b/archive/2019/participants/2018-mathsat-incremental.html index 11c3221f..d0fc725c 100644 --- a/archive/2019/participants/2018-mathsat-incremental.html +++ b/archive/2019/participants/2018-mathsat-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-MathSAT (incremental)

    - + - - diff --git a/archive/2019/participants/2018-mathsat-unsat-core.html b/archive/2019/participants/2018-mathsat-unsat-core.html index 1d4ab78a..887c2f95 100644 --- a/archive/2019/participants/2018-mathsat-unsat-core.html +++ b/archive/2019/participants/2018-mathsat-unsat-core.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-MathSAT (unsat core)

    - + - - diff --git a/archive/2019/participants/2018-minkeyrink-mt.html b/archive/2019/participants/2018-minkeyrink-mt.html index 6afdc6ec..bb54817c 100644 --- a/archive/2019/participants/2018-minkeyrink-mt.html +++ b/archive/2019/participants/2018-minkeyrink-mt.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-Minkeyrink MT

    - + - - diff --git a/archive/2019/participants/2018-smtinterpol-unsat-core.html b/archive/2019/participants/2018-smtinterpol-unsat-core.html index 5326fd91..e451bacb 100644 --- a/archive/2019/participants/2018-smtinterpol-unsat-core.html +++ b/archive/2019/participants/2018-smtinterpol-unsat-core.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-SMTInterpol (unsat core)

    - + - - diff --git a/archive/2019/participants/2018-smtrat-rat.html b/archive/2019/participants/2018-smtrat-rat.html index 7f0311c8..0c0558fc 100644 --- a/archive/2019/participants/2018-smtrat-rat.html +++ b/archive/2019/participants/2018-smtrat-rat.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-SMTRAT-Rat

    - + - - diff --git a/archive/2019/participants/2018-spass-satt.html b/archive/2019/participants/2018-spass-satt.html index d1efd36f..4f066f29 100644 --- a/archive/2019/participants/2018-spass-satt.html +++ b/archive/2019/participants/2018-spass-satt.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-SPASS-SATT

    - + - - diff --git a/archive/2019/participants/2018-vampire.html b/archive/2019/participants/2018-vampire.html index b11b9616..1a620e06 100644 --- a/archive/2019/participants/2018-vampire.html +++ b/archive/2019/participants/2018-vampire.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-Vampire

    - + - - diff --git a/archive/2019/participants/2018-yices-incremental.html b/archive/2019/participants/2018-yices-incremental.html index 0f2d5691..51e76872 100644 --- a/archive/2019/participants/2018-yices-incremental.html +++ b/archive/2019/participants/2018-yices-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-Yices (incremental)

    - + - - diff --git a/archive/2019/participants/2018-yices-unsat-core.html b/archive/2019/participants/2018-yices-unsat-core.html index 620e5463..805a487d 100644 --- a/archive/2019/participants/2018-yices-unsat-core.html +++ b/archive/2019/participants/2018-yices-unsat-core.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-Yices (unsat core)

    - + - - diff --git a/archive/2019/participants/2018-yices.html b/archive/2019/participants/2018-yices.html index 239b42ff..3ef3d47f 100644 --- a/archive/2019/participants/2018-yices.html +++ b/archive/2019/participants/2018-yices.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-Yices

    - + - - diff --git a/archive/2019/participants/2018-z3-incremental.html b/archive/2019/participants/2018-z3-incremental.html index d0cc7a06..a3e0ffae 100644 --- a/archive/2019/participants/2018-z3-incremental.html +++ b/archive/2019/participants/2018-z3-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-Z3 (incremental)

    - + - - diff --git a/archive/2019/participants/2018-z3-unsat-core.html b/archive/2019/participants/2018-z3-unsat-core.html index 94b15535..f9ba0035 100644 --- a/archive/2019/participants/2018-z3-unsat-core.html +++ b/archive/2019/participants/2018-z3-unsat-core.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-Z3 (unsat core)

    - + - - diff --git a/archive/2019/participants/2018-z3.html b/archive/2019/participants/2018-z3.html index fdd4f6fa..60797ad3 100644 --- a/archive/2019/participants/2018-z3.html +++ b/archive/2019/participants/2018-z3.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -140,8 +140,6 @@

    2018-Z3

    - + - - diff --git a/archive/2019/participants/alt-ergo.html b/archive/2019/participants/alt-ergo.html index 9f832ff8..e3e3a840 100644 --- a/archive/2019/participants/alt-ergo.html +++ b/archive/2019/participants/alt-ergo.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    Alt-Ergo

    - + - - diff --git a/archive/2019/participants/aprove.html b/archive/2019/participants/aprove.html index 54ed2700..88f54c52 100644 --- a/archive/2019/participants/aprove.html +++ b/archive/2019/participants/aprove.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    AProVE

    - + - - diff --git a/archive/2019/participants/boolector-incremental.html b/archive/2019/participants/boolector-incremental.html index 32267f77..2d4b9e7c 100644 --- a/archive/2019/participants/boolector-incremental.html +++ b/archive/2019/participants/boolector-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    Boolector (incremental)

    - + - - diff --git a/archive/2019/participants/boolector-reasonls.html b/archive/2019/participants/boolector-reasonls.html index 2d7a9af9..1c39c02b 100644 --- a/archive/2019/participants/boolector-reasonls.html +++ b/archive/2019/participants/boolector-reasonls.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    Boolector-ReasonLS

    - + - - diff --git a/archive/2019/participants/boolector.html b/archive/2019/participants/boolector.html index 00d90c9f..48644597 100644 --- a/archive/2019/participants/boolector.html +++ b/archive/2019/participants/boolector.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    Boolector

    - + - - diff --git a/archive/2019/participants/colibri.html b/archive/2019/participants/colibri.html index d0ee9614..ce5aaf42 100644 --- a/archive/2019/participants/colibri.html +++ b/archive/2019/participants/colibri.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    COLIBRI

    - + - - diff --git a/archive/2019/participants/ctrl-ergo.html b/archive/2019/participants/ctrl-ergo.html index e834912d..23a0d294 100644 --- a/archive/2019/participants/ctrl-ergo.html +++ b/archive/2019/participants/ctrl-ergo.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    Ctrl-Ergo

    - + - - diff --git a/archive/2019/participants/cvc4-inc-fixed.html b/archive/2019/participants/cvc4-inc-fixed.html index fe665c84..3db666fe 100644 --- a/archive/2019/participants/cvc4-inc-fixed.html +++ b/archive/2019/participants/cvc4-inc-fixed.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    CVC4-inc-fixed

    - + - - diff --git a/archive/2019/participants/cvc4-inc.html b/archive/2019/participants/cvc4-inc.html index 897d7753..735648d8 100644 --- a/archive/2019/participants/cvc4-inc.html +++ b/archive/2019/participants/cvc4-inc.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    CVC4-inc

    - + - - diff --git a/archive/2019/participants/cvc4-mv.html b/archive/2019/participants/cvc4-mv.html index 036dfe7b..83abef0d 100644 --- a/archive/2019/participants/cvc4-mv.html +++ b/archive/2019/participants/cvc4-mv.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    CVC4-mv

    - + - - diff --git a/archive/2019/participants/cvc4-symbreak.html b/archive/2019/participants/cvc4-symbreak.html index 23a9d746..927bd973 100644 --- a/archive/2019/participants/cvc4-symbreak.html +++ b/archive/2019/participants/cvc4-symbreak.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    CVC4-SymBreak

    - + - - diff --git a/archive/2019/participants/cvc4-uc.html b/archive/2019/participants/cvc4-uc.html index fd16fdb4..4a3a9e19 100644 --- a/archive/2019/participants/cvc4-uc.html +++ b/archive/2019/participants/cvc4-uc.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    CVC4-uc

    - + - - diff --git a/archive/2019/participants/cvc4.html b/archive/2019/participants/cvc4.html index 294fd1fa..10e502be 100644 --- a/archive/2019/participants/cvc4.html +++ b/archive/2019/participants/cvc4.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    CVC4

    - + - - diff --git a/archive/2019/participants/mathsat-default.html b/archive/2019/participants/mathsat-default.html index e8ae3675..7067546a 100644 --- a/archive/2019/participants/mathsat-default.html +++ b/archive/2019/participants/mathsat-default.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    MathSAT-default

    - + - - diff --git a/archive/2019/participants/mathsat-na-ext.html b/archive/2019/participants/mathsat-na-ext.html index ce2bb723..4b5997f2 100644 --- a/archive/2019/participants/mathsat-na-ext.html +++ b/archive/2019/participants/mathsat-na-ext.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    MathSAT-na-ext

    - + - - diff --git a/archive/2019/participants/minkeyrink-solver-mt.html b/archive/2019/participants/minkeyrink-solver-mt.html index 3131ff8e..788e386e 100644 --- a/archive/2019/participants/minkeyrink-solver-mt.html +++ b/archive/2019/participants/minkeyrink-solver-mt.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    Minkeyrink Solver MT

    - + - - diff --git a/archive/2019/participants/minkeyrink-solver.html b/archive/2019/participants/minkeyrink-solver.html index abe93f56..ff94f0c6 100644 --- a/archive/2019/participants/minkeyrink-solver.html +++ b/archive/2019/participants/minkeyrink-solver.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    Minkeyrink Solver

    - + - - diff --git a/archive/2019/participants/opensmt2.html b/archive/2019/participants/opensmt2.html index 4d36cb6d..fce960cf 100644 --- a/archive/2019/participants/opensmt2.html +++ b/archive/2019/participants/opensmt2.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    OpenSMT2

    - + - - diff --git a/archive/2019/participants/par4.html b/archive/2019/participants/par4.html index 55d9277b..872f0061 100644 --- a/archive/2019/participants/par4.html +++ b/archive/2019/participants/par4.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    Par4

    - + - - diff --git a/archive/2019/participants/poolector.html b/archive/2019/participants/poolector.html index 4a5dae52..f7bce6e6 100644 --- a/archive/2019/participants/poolector.html +++ b/archive/2019/participants/poolector.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    Poolector

    - + - - diff --git a/archive/2019/participants/prob.html b/archive/2019/participants/prob.html index 0aaa7279..9184f1a0 100644 --- a/archive/2019/participants/prob.html +++ b/archive/2019/participants/prob.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    ProB

    - + - - diff --git a/archive/2019/participants/q3b.html b/archive/2019/participants/q3b.html index 5b8045f3..70df5a56 100644 --- a/archive/2019/participants/q3b.html +++ b/archive/2019/participants/q3b.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    Q3B

    - + - - diff --git a/archive/2019/participants/smt-rat.html b/archive/2019/participants/smt-rat.html index ceb1596c..1a4b35cd 100644 --- a/archive/2019/participants/smt-rat.html +++ b/archive/2019/participants/smt-rat.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    SMT-RAT

    - + - - diff --git a/archive/2019/participants/smtinterpol.html b/archive/2019/participants/smtinterpol.html index 34f2f2c0..62b75245 100644 --- a/archive/2019/participants/smtinterpol.html +++ b/archive/2019/participants/smtinterpol.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    SMTInterpol

    - + - - diff --git a/archive/2019/participants/smtrat-mcsat.html b/archive/2019/participants/smtrat-mcsat.html index f38ae2ff..5b96ffa5 100644 --- a/archive/2019/participants/smtrat-mcsat.html +++ b/archive/2019/participants/smtrat-mcsat.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    SMTRAT-MCSAT

    - + - - diff --git a/archive/2019/participants/spass-satt.html b/archive/2019/participants/spass-satt.html index 52ce92ba..ead7c567 100644 --- a/archive/2019/participants/spass-satt.html +++ b/archive/2019/participants/spass-satt.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    SPASS-SATT

    - + - - diff --git a/archive/2019/participants/stp-incremental.html b/archive/2019/participants/stp-incremental.html index 1a9e4877..395d787d 100644 --- a/archive/2019/participants/stp-incremental.html +++ b/archive/2019/participants/stp-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    STP-incremental

    - + - - diff --git a/archive/2019/participants/stp-mergesat-fixed.html b/archive/2019/participants/stp-mergesat-fixed.html index e7eebd51..d171b3bf 100644 --- a/archive/2019/participants/stp-mergesat-fixed.html +++ b/archive/2019/participants/stp-mergesat-fixed.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    STP-mergesat-fixed

    - + - - diff --git a/archive/2019/participants/stp-mergesat.html b/archive/2019/participants/stp-mergesat.html index 0361c3f6..125135e7 100644 --- a/archive/2019/participants/stp-mergesat.html +++ b/archive/2019/participants/stp-mergesat.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    STP-mergesat

    - + - - diff --git a/archive/2019/participants/stp-minisat.html b/archive/2019/participants/stp-minisat.html index a5ae15fc..b7ad8245 100644 --- a/archive/2019/participants/stp-minisat.html +++ b/archive/2019/participants/stp-minisat.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    STP-minisat

    - + - - diff --git a/archive/2019/participants/stp-mt.html b/archive/2019/participants/stp-mt.html index a25931b1..bd1a4dee 100644 --- a/archive/2019/participants/stp-mt.html +++ b/archive/2019/participants/stp-mt.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    STP-mt

    - + - - diff --git a/archive/2019/participants/stp-portfolio-fixed.html b/archive/2019/participants/stp-portfolio-fixed.html index cbcb410f..18f5d4b9 100644 --- a/archive/2019/participants/stp-portfolio-fixed.html +++ b/archive/2019/participants/stp-portfolio-fixed.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    STP-portfolio-fixed

    - + - - diff --git a/archive/2019/participants/stp-portfolio.html b/archive/2019/participants/stp-portfolio.html index 86665f10..ea8f981a 100644 --- a/archive/2019/participants/stp-portfolio.html +++ b/archive/2019/participants/stp-portfolio.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    STP-portfolio

diff --git a/archive/2019/participants/stp-riss.html b/archive/2019/participants/stp-riss.html
index c3cac991..301245d2 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ STP-riss

diff --git a/archive/2019/participants/stp.html b/archive/2019/participants/stp.html
index e2de0de4..b0decaf3 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ STP

diff --git a/archive/2019/participants/ultimateeliminator-mathsat-5-5-4.html b/archive/2019/participants/ultimateeliminator-mathsat-5-5-4.html
index 2df712ea..2aa3a6eb 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ UltimateEliminator+MathSAT-5.5.4

diff --git a/archive/2019/participants/ultimateeliminator-smtinterpol.html b/archive/2019/participants/ultimateeliminator-smtinterpol.html
index 3efe7a33..68d264a0 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ UltimateEliminator+SMTInterpol

diff --git a/archive/2019/participants/ultimateeliminator-yices-2-6-1.html b/archive/2019/participants/ultimateeliminator-yices-2-6-1.html
index 77398ad3..6fe9239b 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ UltimateEliminator+Yices-2.6.1

diff --git a/archive/2019/participants/vampire.html b/archive/2019/participants/vampire.html
index 72c2009a..b7d416e3 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ Vampire

diff --git a/archive/2019/participants/verit-rasat-redlog.html b/archive/2019/participants/verit-rasat-redlog.html
index b6a36765..166b8ce0 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ veriT+raSAT+Redlog

diff --git a/archive/2019/participants/verit.html b/archive/2019/participants/verit.html
index 23729171..cdeb8313 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ veriT

diff --git a/archive/2019/participants/yices-2-6-2-cadical-smt-lib2-models.html b/archive/2019/participants/yices-2-6-2-cadical-smt-lib2-models.html
index f3cb9d1b..94987657 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ Yices 2.6.2 CaDiCal/SMT-LIB2 Models

diff --git a/archive/2019/participants/yices-2-6-2-cadical.html b/archive/2019/participants/yices-2-6-2-cadical.html
index 3fbdad8f..f4f8f05a 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ Yices 2.6.2 CaDiCal

diff --git a/archive/2019/participants/yices-2-6-2-cryptominisat-smt-lib2-models.html b/archive/2019/participants/yices-2-6-2-cryptominisat-smt-lib2-models.html
index 9de770e0..50df9802 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ Yices 2.6.2 Cryptominisat/SMT-LIB2 Models

diff --git a/archive/2019/participants/yices-2-6-2-cryptominisat.html b/archive/2019/participants/yices-2-6-2-cryptominisat.html
index 77730d3b..e454c4c3 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ Yices 2.6.2 Cryptominisat

diff --git a/archive/2019/participants/yices-2-6-2-incremental.html b/archive/2019/participants/yices-2-6-2-incremental.html
index 3047290d..b1dd6644 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ Yices 2.6.2 Incremental

diff --git a/archive/2019/participants/yices-2-6-2-mcsat-bv.html b/archive/2019/participants/yices-2-6-2-mcsat-bv.html
index 5749bce1..3eb996ae 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ Yices 2.6.2 mcsat-bv

diff --git a/archive/2019/participants/yices-2-6-2-model-validation.html b/archive/2019/participants/yices-2-6-2-model-validation.html
index c1dd1368..8df5264d 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ Yices 2.6.2 Model Validation

diff --git a/archive/2019/participants/yices-2-6-2-new-bvsolver-with-smt2-models.html b/archive/2019/participants/yices-2-6-2-new-bvsolver-with-smt2-models.html
index 57f9c331..399f3163 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ Yices 2.6.2 New Bvsolver with SMT2 Models

diff --git a/archive/2019/participants/yices-2-6-2-new-bvsolver.html b/archive/2019/participants/yices-2-6-2-new-bvsolver.html
index 90ba1324..cf6b2862 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ Yices 2.6.2 New Bvsolver

diff --git a/archive/2019/participants/yices-2-6-2.html b/archive/2019/participants/yices-2-6-2.html
index 8825d09e..6bf9c4de 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ Yices 2.6.2

diff --git a/archive/2019/participants/z3.html b/archive/2019/participants/z3.html
index f409676e..8dddeb59 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
@@ -140,8 +140,6 @@ Z3

diff --git a/archive/2019/results.html b/archive/2019/results.html
index 29f2115a..7af0e08b 100644
@@ -34,7 +34,7 @@ SMT-COMP
@@ -55,7 +55,7 @@ SMT-COMP 2019 Rules
@@ -158,445 +158,445 @@ Tracks Summary / Divisions
@@ -611,7 +611,6 @@ Divisions

diff --git a/archive/2019/results/abvfp-incremental.html b/archive/2019/results/abvfp-incremental.html
index 0d3805e6..f638ab48 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
[remaining hunks: ABVFP (Incremental Track) results — Parallel Performance winner CVC4-inc; result rows for CVC4-inc, UltimateEliminator+MathSAT-5.5.4, and Z3n; the +/- markup and fused table cell values were lost in extraction]

diff --git a/archive/2019/results/abvfp-single-query.html b/archive/2019/results/abvfp-single-query.html
index 4222ed5f..9b03dd8b 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
[remaining hunks: ABVFP (Single Query Track) results — winner CVC4 in the sequential, parallel, SAT, and 24s categories (UNSAT shows —); result rows for 2018-CVC4n, CVC4, UltimateEliminator+MathSAT-5.5.4, and Z3n; the +/- markup and fused table cell values were lost in extraction]

diff --git a/archive/2019/results/alia-incremental.html b/archive/2019/results/alia-incremental.html
index 3ff2ce95..f999f75d 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
[remaining hunks: ALIA (Incremental Track) results — Parallel Performance winner CVC4-inc; result rows for 2018-Z3 (incremental)n, Z3n, CVC4-inc, SMTInterpol, UltimateEliminator+SMTInterpol, UltimateEliminator+MathSAT-5.5.4, and UltimateEliminator+Yices-2.6.1; the +/- markup and fused table cell values were lost in extraction]

diff --git a/archive/2019/results/alia-single-query.html b/archive/2019/results/alia-single-query.html
index 45fb00e7..334cd799 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
[remaining hunks: ALIA (Single Query Track) results — winners SMTInterpol (sequential, parallel, SAT), Alt-Ergo (UNSAT), and CVC4 (24s); result rows for 2018-Z3n, Z3n, SMTInterpol, CVC4, Alt-Ergo, Vampire, veriT, UltimateEliminator+SMTInterpol, UltimateEliminator+Yices-2.6.1, and UltimateEliminator+MathSAT-5.5.4; the +/- markup and fused table cell values were lost in extraction]

diff --git a/archive/2019/results/ania-incremental.html b/archive/2019/results/ania-incremental.html
index f2edc81c..19c771e9 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
[remaining hunks: ANIA (Incremental Track) results — Parallel Performance winner CVC4-inc; result rows for 2018-CVC4 (incremental)n, CVC4-inc, Z3n, UltimateEliminator+MathSAT-5.5.4, and UltimateEliminator+Yices-2.6.1; the +/- markup and fused table cell values were lost in extraction]

diff --git a/archive/2019/results/aufdtlia-single-query.html b/archive/2019/results/aufdtlia-single-query.html
index 41908745..63908b41 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
[remaining hunks: AUFDTLIA (Single Query Track) results — winner CVC4 in all five performance categories; result rows for 2018-CVC4n, CVC4, Alt-Ergo, and Vampire; the +/- markup and fused table cell values were lost in extraction]

diff --git a/archive/2019/results/auflia-single-query.html b/archive/2019/results/auflia-single-query.html
index 7451f32a..3ca54071 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
[remaining hunks: AUFLIA (Single Query Track) results — winners CVC4 (sequential, SAT) and Vampire (parallel, UNSAT, 24s); result rows for 2018-CVC4n, CVC4-SymBreakn, CVC4, Vampire, Z3n, Alt-Ergo, veriT, SMTInterpol, UltimateEliminator+SMTInterpol, UltimateEliminator+MathSAT-5.5.4, and UltimateEliminator+Yices-2.6.1; the +/- markup and fused table cell values were lost in extraction]

diff --git a/archive/2019/results/auflia-unsat-core.html b/archive/2019/results/auflia-unsat-core.html
index fe13e99e..718941a1 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
[remaining hunks: AUFLIA (Unsat Core Track) results — winner CVC4-uc in both the sequential and parallel categories; result rows for 2018-CVC4 (unsat core)n, CVC4-uc, Z3n, UltimateEliminator+SMTInterpol, UltimateEliminator+MathSAT-5.5.4, and UltimateEliminator+Yices-2.6.1; the +/- markup and fused table cell values were lost in extraction]

diff --git a/archive/2019/results/auflira-single-query.html b/archive/2019/results/auflira-single-query.html
index 7022ebaa..82c7a83a 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
[remaining hunks: AUFLIRA (Single Query Track) results — winners Par4 (sequential, parallel, SAT, 24s) and Vampire (UNSAT); result rows for Par4, Z3n, 2018-Z3n, CVC4, Vampire, CVC4-SymBreakn, Alt-Ergo, veriT, SMTInterpol, UltimateEliminator+Yices-2.6.1, UltimateEliminator+MathSAT-5.5.4, and UltimateEliminator+SMTInterpol; the +/- markup and fused table cell values were lost in extraction]

diff --git a/archive/2019/results/auflira-unsat-core.html b/archive/2019/results/auflira-unsat-core.html
index 7f23037e..d2088e37 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
[remaining hunks: AUFLIRA (Unsat Core Track) results — winner CVC4-uc in both the sequential and parallel categories; result rows for 2018-CVC4 (unsat core)n, CVC4-uc, Z3n, UltimateEliminator+SMTInterpol, UltimateEliminator+Yices-2.6.1, and UltimateEliminator+MathSAT-5.5.4; the +/- markup and fused table cell values were lost in extraction]

diff --git a/archive/2019/results/aufnia-single-query.html b/archive/2019/results/aufnia-single-query.html
index 1dde9379..68999e87 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
[remaining hunks: AUFNIA (Single Query Track) results — no winner in any performance category (every row reports 0 benchmarks solved); result rows for CVC4-SymBreakn, CVC4, Z3n, Alt-Ergo, Vampire, UltimateEliminator+Yices-2.6.1, and UltimateEliminator+MathSAT-5.5.4; the +/- markup and fused table cell values were lost in extraction]

diff --git a/archive/2019/results/aufnia-unsat-core.html b/archive/2019/results/aufnia-unsat-core.html
index abaedd7f..0df63766 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
[remaining hunks: AUFNIA (Unsat Core Track) results — no winner in either the sequential or parallel category; result rows for UltimateEliminator+Yices-2.6.1, UltimateEliminator+MathSAT-5.5.4, Z3n, and CVC4-uc; the +/- markup and fused table cell values were lost in extraction]

diff --git a/archive/2019/results/aufnira-incremental.html b/archive/2019/results/aufnira-incremental.html
index 389048c2..073b74fb 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
[remaining hunks: AUFNIRA (Incremental Track) results — Parallel Performance winner CVC4-inc; result rows for CVC4-inc, Z3n, 2018-CVC4 (incremental)n, UltimateEliminator+Yices-2.6.1, and UltimateEliminator+MathSAT-5.5.4; the +/- markup and fused table cell values were lost in extraction]

diff --git a/archive/2019/results/aufnira-single-query.html b/archive/2019/results/aufnira-single-query.html
index 63b7969a..63eade76 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
[remaining hunks: AUFNIRA (Single Query Track) results — winner Par4 in all five performance categories; result rows for Par4, CVC4, 2018-CVC4n, Vampire, Z3n, Alt-Ergo, UltimateEliminator+Yices-2.6.1, and UltimateEliminator+MathSAT-5.5.4; the +/- markup and fused table cell values were lost in extraction]

diff --git a/archive/2019/results/aufnira-unsat-core.html b/archive/2019/results/aufnira-unsat-core.html
index 1a3f6a06..0733c18d 100644
@@ -35,7 +35,7 @@ SMT-COMP
@@ -56,7 +56,7 @@ SMT-COMP 2019 Rules
[remaining hunks: AUFNIRA (Unsat Core Track) results — winner CVC4-uc in both the sequential and parallel categories; result rows for CVC4-uc, 2018-CVC4 (unsat core)n, Z3n, UltimateEliminator+Yices-2.6.1, and UltimateEliminator+MathSAT-5.5.4; the +/- markup and fused table cell values were lost in extraction]

    - + - diff --git a/archive/2019/results/biggest-lead-challenge-incremental.html b/archive/2019/results/biggest-lead-challenge-incremental.html index 7c2dd802..979f6b6c 100644 --- a/archive/2019/results/biggest-lead-challenge-incremental.html +++ b/archive/2019/results/biggest-lead-challenge-incremental.html @@ -35,7 +35,7 @@

    [formatting-only diff hunks for the Biggest Lead (Challenge Track, incremental) page: whitespace and trailing-markup cleanup; the winner (Yices 2.6.2 Incremental, parallel performance, lead 1.89130435) and the ranked lead ratios are unchanged]
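    [editor's note: these biggest-lead pages, this one and the ones that follow, rank divisions by how far the best solver finishes ahead of the runner-up. As a rough illustration only — the authoritative definition (score computation, smoothing, tie-breaking) is in the SMT-COMP 2019 rules linked from these pages — a minimal Python sketch of such a lead ratio, with made-up solver scores, could look like this:]

```python
# Rough sketch of a "biggest lead" style ranking (assumed semantics, not the
# official SMT-COMP implementation): the lead is the ratio between the score
# of the best solver and the score of the runner-up in a division.

def biggest_lead(scores: dict[str, float]) -> tuple[str, float]:
    """Return the leading solver and its lead over the second-best one.

    `scores` maps solver name -> division score (higher is better).
    """
    ranked = sorted(scores.items(), key=lambda kv: kv[1], reverse=True)
    if len(ranked) < 2:
        raise ValueError("need at least two solvers to measure a lead")
    (leader, s1), (_, s2) = ranked[0], ranked[1]
    return leader, (s1 / s2 if s2 > 0 else float("inf"))

# Illustrative values only (not taken from the tables above):
print(biggest_lead({"Yices 2.6.2 Incremental": 87.0, "Boolector (incremental)": 46.0}))
```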

diff --git a/archive/2019/results/biggest-lead-challenge-non-incremental.html b/archive/2019/results/biggest-lead-challenge-non-incremental.html
index 420fc2c6..80de3f22 100644
--- a/archive/2019/results/biggest-lead-challenge-non-incremental.html
+++ b/archive/2019/results/biggest-lead-challenge-non-incremental.html

    [formatting-only diff hunks for the Biggest Lead (Challenge Track, non-incremental) page: whitespace and trailing-markup cleanup; winners (Yices 2.6.2 for sequential, parallel, SAT and 24-second performance; Poolector for UNSAT performance) and the ranked lead ratios are unchanged]

diff --git a/archive/2019/results/biggest-lead-incremental.html b/archive/2019/results/biggest-lead-incremental.html
index 0d7ed2a8..60300a21 100644
--- a/archive/2019/results/biggest-lead-incremental.html
+++ b/archive/2019/results/biggest-lead-incremental.html

    [formatting-only diff hunks for the Biggest Lead (Incremental Track) page: whitespace and trailing-markup cleanup; the winner (CVC4-inc, parallel performance) and the ranked lead ratios are unchanged]

diff --git a/archive/2019/results/biggest-lead-model-validation.html b/archive/2019/results/biggest-lead-model-validation.html
index 9adc34af..8b04a231 100644
--- a/archive/2019/results/biggest-lead-model-validation.html
+++ b/archive/2019/results/biggest-lead-model-validation.html

    [formatting-only diff hunks for the Biggest Lead (Model Validation Track) page: whitespace and trailing-markup cleanup; the leading entry (Boolector, 1.00265623 for both sequential and parallel performance) is unchanged]

diff --git a/archive/2019/results/biggest-lead-single-query.html b/archive/2019/results/biggest-lead-single-query.html
index ad4d3fd6..a209c77a 100644
--- a/archive/2019/results/biggest-lead-single-query.html
+++ b/archive/2019/results/biggest-lead-single-query.html

    [formatting-only diff hunks for the Biggest Lead (Single Query Track) page: whitespace and trailing-markup cleanup; the winner (CVC4 in all five categories: sequential, parallel, SAT, UNSAT and 24-second performance) and the full per-division lead rankings are unchanged]

diff --git a/archive/2019/results/biggest-lead-unsat-core.html b/archive/2019/results/biggest-lead-unsat-core.html
index 66462ead..ca20e53e 100644
--- a/archive/2019/results/biggest-lead-unsat-core.html
+++ b/archive/2019/results/biggest-lead-unsat-core.html

    [formatting-only diff hunks for the Biggest Lead (Unsat Core Track) page: whitespace and trailing-markup cleanup; winners (CVC4-uc for both sequential and parallel performance) and the ranked lead ratios are unchanged]

diff --git a/archive/2019/results/bv-incremental.html b/archive/2019/results/bv-incremental.html
index 0c3847e8..a1a10396 100644
--- a/archive/2019/results/bv-incremental.html
+++ b/archive/2019/results/bv-incremental.html

    [formatting-only diff hunks for the BV (Incremental Track) results page: whitespace and trailing-markup cleanup; the winner (CVC4-inc, parallel performance) and per-solver scores are unchanged]

diff --git a/archive/2019/results/bv-single-query.html b/archive/2019/results/bv-single-query.html
index aee5d874..97ceea3b 100644
--- a/archive/2019/results/bv-single-query.html
+++ b/archive/2019/results/bv-single-query.html

    [formatting-only diff hunks for the BV (Single Query Track) results page: whitespace and trailing-markup cleanup; the winner (Par4 in all five categories) and per-solver scores are unchanged]

diff --git a/archive/2019/results/bv-unsat-core.html b/archive/2019/results/bv-unsat-core.html
index 0d86435c..b6c516da 100644
--- a/archive/2019/results/bv-unsat-core.html
+++ b/archive/2019/results/bv-unsat-core.html

    [formatting-only diff hunks for the BV (Unsat Core Track) results page: whitespace and trailing-markup cleanup; winners (CVC4-uc for both sequential and parallel performance) and per-solver scores are unchanged]

diff --git a/archive/2019/results/bvfp-incremental.html b/archive/2019/results/bvfp-incremental.html
index f70bf84c..ed1f8949 100644
--- a/archive/2019/results/bvfp-incremental.html
+++ b/archive/2019/results/bvfp-incremental.html

    [formatting-only diff hunks for the BVFP (Incremental Track) results page: whitespace and trailing-markup cleanup; the winner (CVC4-inc, parallel performance) and per-solver scores are unchanged]

diff --git a/archive/2019/results/bvfp-single-query.html b/archive/2019/results/bvfp-single-query.html
index 25f46a6f..63f81c8f 100644
--- a/archive/2019/results/bvfp-single-query.html
+++ b/archive/2019/results/bvfp-single-query.html

    [formatting-only diff hunks for the BVFP (Single Query Track) results page: whitespace and trailing-markup cleanup; the winner (CVC4 in all five categories) and per-solver scores are unchanged]

diff --git a/archive/2019/results/fp-single-query.html b/archive/2019/results/fp-single-query.html
index e684e7ae..8a5e7410 100644
--- a/archive/2019/results/fp-single-query.html
+++ b/archive/2019/results/fp-single-query.html

    [formatting-only diff hunks for the FP (Single Query Track) results page: whitespace and trailing-markup cleanup; the winner (CVC4 in all five categories) and per-solver scores are unchanged]

diff --git a/archive/2019/results/largest-contribution-challenge-incremental.html b/archive/2019/results/largest-contribution-challenge-incremental.html
index 19eb524f..4471a1ba 100644
--- a/archive/2019/results/largest-contribution-challenge-incremental.html
+++ b/archive/2019/results/largest-contribution-challenge-incremental.html

    [formatting-only diff hunks for the Largest Contribution (Challenge Track, incremental) page: whitespace and trailing-markup cleanup; the winner (Yices 2.6.2 Incremental, parallel performance) and the ranked contribution scores are unchanged]

diff --git a/archive/2019/results/largest-contribution-challenge-non-incremental.html b/archive/2019/results/largest-contribution-challenge-non-incremental.html
index 223ec9d7..5893af3a 100644
--- a/archive/2019/results/largest-contribution-challenge-non-incremental.html
+++ b/archive/2019/results/largest-contribution-challenge-non-incremental.html

    [formatting-only diff hunks for the Largest Contribution (Challenge Track, non-incremental) page: whitespace and trailing-markup cleanup; winners (Yices 2.6.2 for sequential, parallel and SAT performance; Poolector for UNSAT performance; Minkeyrink Solver for 24-second performance) and the ranked contribution scores are unchanged]

diff --git a/archive/2019/results/largest-contribution-incremental.html b/archive/2019/results/largest-contribution-incremental.html
index e8ea9fa8..01b3c6f0 100644
--- a/archive/2019/results/largest-contribution-incremental.html
+++ b/archive/2019/results/largest-contribution-incremental.html

    [formatting-only diff hunks for the Largest Contribution (Incremental Track) page: whitespace and trailing-markup cleanup; the winner (CVC4-inc, parallel performance) and the ranked contribution scores are unchanged]

diff --git a/archive/2019/results/largest-contribution-model-validation.html b/archive/2019/results/largest-contribution-model-validation.html
index bb2c9819..60778244 100644
--- a/archive/2019/results/largest-contribution-model-validation.html
+++ b/archive/2019/results/largest-contribution-model-validation.html

    [formatting-only diff hunks for the Largest Contribution (Model Validation Track) page: whitespace and trailing-markup cleanup; the leading entry (Boolector, 0.00166991 sequential / 0.00166922 parallel) is unchanged]

diff --git a/archive/2019/results/largest-contribution-single-query.html b/archive/2019/results/largest-contribution-single-query.html
index 91b50cb5..6bb92105 100644
--- a/archive/2019/results/largest-contribution-single-query.html
+++ b/archive/2019/results/largest-contribution-single-query.html

    [formatting-only diff hunks for the Largest Contribution (Single Query Track) page, whose diff continues past this excerpt: whitespace and trailing-markup cleanup; winners (CVC4 for sequential and parallel performance; Par4 for SAT and UNSAT performance; Vampire for 24-second performance) and the ranked contribution scores are unchanged]

    - + Yices 2.6.2 0.0 @@ -1898,7 +1898,7 @@

    SAT Performance

    - + Yices 2.6.2 0.0 @@ -1912,7 +1912,7 @@

    SAT Performance

    - + MathSAT-default 0.0 @@ -1941,7 +1941,7 @@

    UNSAT Performance

    - + Par4 0.00339904 @@ -1955,7 +1955,7 @@

    UNSAT Performance

    - + Par4 0.00301761 @@ -1969,7 +1969,7 @@

    UNSAT Performance

    - + Vampire 0.00186316 @@ -1983,7 +1983,7 @@

    UNSAT Performance

    - + CVC4 0.00115031 @@ -1997,7 +1997,7 @@

    UNSAT Performance

    - + Par4 0.00095653 @@ -2011,7 +2011,7 @@

    UNSAT Performance

    - + Vampire 0.00073696 @@ -2025,7 +2025,7 @@

    UNSAT Performance

    - + CVC4 0.00055166 @@ -2039,7 +2039,7 @@

    UNSAT Performance

    - + Vampire 0.00051805 @@ -2053,7 +2053,7 @@

    UNSAT Performance

    - + CVC4 0.00039209 @@ -2067,7 +2067,7 @@

    UNSAT Performance

    - + CVC4 0.0003074 @@ -2081,7 +2081,7 @@

    UNSAT Performance

    - + Par4 0.00020998 @@ -2095,7 +2095,7 @@

    UNSAT Performance

    - + Vampire 0.00015526 @@ -2109,7 +2109,7 @@

    UNSAT Performance

    - + Yices 2.6.2 0.00013335 @@ -2123,7 +2123,7 @@

    UNSAT Performance

    - + Poolector 0.00010644 @@ -2137,7 +2137,7 @@

    UNSAT Performance

    - + Par4 0.00010587 @@ -2151,7 +2151,7 @@

    UNSAT Performance

    - + Q3B 0.00010022 @@ -2165,7 +2165,7 @@

    UNSAT Performance

    - + Par4 7.757e-05 @@ -2179,7 +2179,7 @@

    UNSAT Performance

    - + CVC4 7.298e-05 @@ -2193,7 +2193,7 @@

    UNSAT Performance

    - + Par4 6.21e-05 @@ -2207,7 +2207,7 @@

    UNSAT Performance

    - + COLIBRI 5.328e-05 @@ -2221,7 +2221,7 @@

    UNSAT Performance

    - + CVC4 5.218e-05 @@ -2235,7 +2235,7 @@

    UNSAT Performance

    - + Yices 2.6.2 4.016e-05 @@ -2249,7 +2249,7 @@

    UNSAT Performance

    - + SPASS-SATT 3.439e-05 @@ -2263,7 +2263,7 @@

    UNSAT Performance

    - + Par4 3.028e-05 @@ -2277,7 +2277,7 @@

    UNSAT Performance

    - + Vampire 1.839e-05 @@ -2291,7 +2291,7 @@

    UNSAT Performance

    - + Yices 2.6.2 1.72e-05 @@ -2305,7 +2305,7 @@

    UNSAT Performance

    - + SMT-RAT 9.49e-06 @@ -2319,7 +2319,7 @@

    UNSAT Performance

    - + Par4 8.86e-06 @@ -2333,7 +2333,7 @@

    UNSAT Performance

    - + CVC4 0.0 @@ -2347,7 +2347,7 @@

    UNSAT Performance

    - + CVC4 0.0 @@ -2361,7 +2361,7 @@

    UNSAT Performance

    - + Vampire 0.0 @@ -2375,7 +2375,7 @@

    UNSAT Performance

    - + Yices 2.6.2 0.0 @@ -2389,7 +2389,7 @@

    UNSAT Performance

    - + Yices 2.6.2 0.0 @@ -2403,7 +2403,7 @@

    UNSAT Performance

    - + veriT 0.0 @@ -2417,7 +2417,7 @@

    UNSAT Performance

    - + Yices 2.6.2 0.0 @@ -2431,7 +2431,7 @@

    UNSAT Performance

    - + Yices 2.6.2 0.0 @@ -2445,7 +2445,7 @@

    UNSAT Performance

    - + Alt-Ergo 0.0 @@ -2459,7 +2459,7 @@

    UNSAT Performance

    - + Yices 2.6.2 0.0 @@ -2473,7 +2473,7 @@

    UNSAT Performance

    - + CVC4 0.0 @@ -2487,7 +2487,7 @@

    UNSAT Performance

    - + Par4 0.0 @@ -2501,7 +2501,7 @@

    UNSAT Performance

    - + MathSAT-na-ext 0.0 @@ -2515,7 +2515,7 @@

    UNSAT Performance

    - + veriT 0.0 @@ -2529,7 +2529,7 @@

    UNSAT Performance

    - + MathSAT-default 0.0 @@ -2558,7 +2558,7 @@

    24s Performance

    - + Vampire 0.01247027 @@ -2572,7 +2572,7 @@

    24s Performance

    - + Par4 0.00612962 @@ -2586,7 +2586,7 @@

    24s Performance

    - + Par4 0.00383819 @@ -2600,7 +2600,7 @@

    24s Performance

    - + CVC4 0.00207978 @@ -2614,7 +2614,7 @@

    24s Performance

    - + Par4 0.00151092 @@ -2628,7 +2628,7 @@

    24s Performance

    - + Par4 0.00141212 @@ -2642,7 +2642,7 @@

    24s Performance

    - + Vampire 0.00127143 @@ -2656,7 +2656,7 @@

    24s Performance

    - + Vampire 0.00125658 @@ -2670,7 +2670,7 @@

    24s Performance

    - + Vampire 0.00083977 @@ -2684,7 +2684,7 @@

    24s Performance

    - + Par4 0.0006779 @@ -2698,7 +2698,7 @@

    24s Performance

    - + Par4 0.00056816 @@ -2712,7 +2712,7 @@

    24s Performance

    - + Vampire 0.00050395 @@ -2726,7 +2726,7 @@

    24s Performance

    - + Yices 2.6.2 0.00032245 @@ -2740,7 +2740,7 @@

    24s Performance

    - + CVC4 0.00031035 @@ -2754,7 +2754,7 @@

    24s Performance

    - + Par4 0.0003054 @@ -2768,7 +2768,7 @@

    24s Performance

    - + Vampire 0.00030038 @@ -2782,7 +2782,7 @@

    24s Performance

    - + Yices 2.6.2 0.00022461 @@ -2796,7 +2796,7 @@

    24s Performance

    - + COLIBRI 0.00021667 @@ -2810,7 +2810,7 @@

    24s Performance

    - + Q3B 0.00021624 @@ -2824,7 +2824,7 @@

    24s Performance

    - + Par4 0.00021261 @@ -2838,7 +2838,7 @@

    24s Performance

    - + Yices 2.6.2 0.00018174 @@ -2852,7 +2852,7 @@

    24s Performance

    - + Ctrl-Ergo 0.00017358 @@ -2866,7 +2866,7 @@

    24s Performance

    - + Par4 0.0001535 @@ -2880,7 +2880,7 @@

    24s Performance

    - + Par4 8.727e-05 @@ -2894,7 +2894,7 @@

    24s Performance

    - + CVC4 4.808e-05 @@ -2908,7 +2908,7 @@

    24s Performance

    - + CVC4 3.479e-05 @@ -2922,7 +2922,7 @@

    24s Performance

    - + Yices 2.6.2 2.846e-05 @@ -2936,7 +2936,7 @@

    24s Performance

    - + Par4 2.76e-05 @@ -2950,7 +2950,7 @@

    24s Performance

    - + Yices 2.6.2 2.667e-05 @@ -2964,7 +2964,7 @@

    24s Performance

    - + Poolector 1.91e-05 @@ -2978,7 +2978,7 @@

    24s Performance

    - + Yices 2.6.2 1.898e-05 @@ -2992,7 +2992,7 @@

    24s Performance

    - + SMTInterpol 1.518e-05 @@ -3006,7 +3006,7 @@

    24s Performance

    - + veriT 1.329e-05 @@ -3020,7 +3020,7 @@

    24s Performance

    - + Yices 2.6.2 9.52e-06 @@ -3034,7 +3034,7 @@

    24s Performance

    - + Yices 2.6.2 9.49e-06 @@ -3048,7 +3048,7 @@

    24s Performance

    - + Boolector 8.64e-06 @@ -3062,7 +3062,7 @@

    24s Performance

    - + Yices 2.6.2 0.0 @@ -3076,7 +3076,7 @@

    24s Performance

    - + Vampire 0.0 @@ -3090,7 +3090,7 @@

    24s Performance

    - + MathSAT-default 0.0 @@ -3104,7 +3104,7 @@

    24s Performance

    - + CVC4 0.0 @@ -3118,7 +3118,7 @@

    24s Performance

    - + Yices 2.6.2 0.0 @@ -3132,7 +3132,7 @@

    24s Performance

    - + veriT 0.0 @@ -3164,7 +3164,6 @@

    24s Performance

    - + - diff --git a/archive/2019/results/largest-contribution-unsat-core.html b/archive/2019/results/largest-contribution-unsat-core.html index 2025b0cb..4c9ec157 100644 --- a/archive/2019/results/largest-contribution-unsat-core.html +++ b/archive/2019/results/largest-contribution-unsat-core.html @@ -35,7 +35,7 @@

    [Whitespace/markup-only hunks over the page header, navigation and results tables; the visible text is unchanged. Page content: SMT-COMP 2019 largest contribution, Unsat Core Track. Winners: CVC4-uc (Sequential Performance) and CVC4-uc (Parallel Performance); contribution scores run from CVC4-uc 0.43380473 down to 0.0 entries for Yices 2.6.2, MathSAT-default and MathSAT-na-ext.]

diff --git a/archive/2019/results/lia-incremental.html b/archive/2019/results/lia-incremental.html
index e77a6a3c..84ba4c0f 100644
--- a/archive/2019/results/lia-incremental.html
+++ b/archive/2019/results/lia-incremental.html

    [Whitespace/markup-only hunks; no result data changes. Page content: competition results for the LIA division in the Incremental Track, SMT-COMP 2019. Winner (Parallel Performance): SMTInterpol. Entrants: the non-competing 2018-Z3 (incremental) and Z3, SMTInterpol, UltimateEliminator+SMTInterpol, UltimateEliminator+MathSAT-5.5.4, CVC4-inc and UltimateEliminator+Yices-2.6.1.]

diff --git a/archive/2019/results/lia-single-query.html b/archive/2019/results/lia-single-query.html
index 0d86604f..2d152661 100644
--- a/archive/2019/results/lia-single-query.html
+++ b/archive/2019/results/lia-single-query.html

    [Whitespace/markup-only hunks; no result data changes. Page content: competition results for the LIA division in the Single Query Track, SMT-COMP 2019. CVC4 wins all five categories (Sequential, Parallel, SAT, UNSAT and 24s Performance). Solved counts: Z3 and 2018-Z3 (both non-competing) and CVC4 300 each, Vampire 139, SMTInterpol 105, UltimateEliminator+SMTInterpol 79, UltimateEliminator+MathSAT-5.5.4 78, veriT 69, ProB 47, UltimateEliminator+Yices-2.6.1 41.]

diff --git a/archive/2019/results/lia-unsat-core.html b/archive/2019/results/lia-unsat-core.html
index cb40a065..a478ba31 100644
--- a/archive/2019/results/lia-unsat-core.html
+++ b/archive/2019/results/lia-unsat-core.html

    [Whitespace/markup-only hunks; no result data changes. Page content: LIA division, Unsat Core Track, SMT-COMP 2019. Winner of both Sequential and Parallel Performance: CVC4-uc. Scores: 2018-Z3 (unsat core, non-competing) 12, Z3 (non-competing) 11, CVC4-uc 8, UltimateEliminator+MathSAT-5.5.4 1, UltimateEliminator+SMTInterpol 1, UltimateEliminator+Yices-2.6.1 0.]

diff --git a/archive/2019/results/lra-incremental.html b/archive/2019/results/lra-incremental.html
index 8ce3c81e..be6a7e65 100644
--- a/archive/2019/results/lra-incremental.html
+++ b/archive/2019/results/lra-incremental.html

    [Whitespace/markup-only hunks; no result data changes. Page content: LRA division, Incremental Track, SMT-COMP 2019. No winner is declared (—): all six entrants solved 0 queries.]

diff --git a/archive/2019/results/lra-single-query.html b/archive/2019/results/lra-single-query.html
index 6a678f7c..c57bdd45 100644
--- a/archive/2019/results/lra-single-query.html
+++ b/archive/2019/results/lra-single-query.html

    [Whitespace/markup-only hunks; no result data changes. Page content: LRA division, Single Query Track, SMT-COMP 2019. Par4 wins all five categories. Solved counts: Z3 (non-competing) 945, Par4 941, 2018-Z3 (non-competing) 931, CVC4 815, UltimateEliminator+SMTInterpol 654, UltimateEliminator+MathSAT-5.5.4 650, UltimateEliminator+Yices-2.6.1 366, Vampire 223, SMTInterpol 152.]

diff --git a/archive/2019/results/nia-single-query.html b/archive/2019/results/nia-single-query.html
index 1061a5dd..169f4abf 100644
--- a/archive/2019/results/nia-single-query.html
+++ b/archive/2019/results/nia-single-query.html

    [Whitespace/markup-only hunks; no result data changes. Page content: NIA division, Single Query Track, SMT-COMP 2019. CVC4 wins all five categories (the higher-placed Z3 entries are non-competing). Solved counts: 2018-Z3 (non-competing) 11, Z3 (non-competing) 10, CVC4 9, UltimateEliminator+MathSAT-5.5.4 8, ProB 5, UltimateEliminator+Yices-2.6.1 1, Vampire 1.]

diff --git a/archive/2019/results/nia-unsat-core.html b/archive/2019/results/nia-unsat-core.html
index ff73a85b..2f33a186 100644
--- a/archive/2019/results/nia-unsat-core.html
+++ b/archive/2019/results/nia-unsat-core.html

    [Whitespace/markup-only hunks; no result data changes. Page content: NIA division, Unsat Core Track, SMT-COMP 2019. No winners are declared (— for both Sequential and Parallel Performance): every entrant scored 0.]

diff --git a/archive/2019/results/nra-single-query.html b/archive/2019/results/nra-single-query.html
index 3553a988..33dae576 100644
--- a/archive/2019/results/nra-single-query.html
+++ b/archive/2019/results/nra-single-query.html

    [Whitespace/markup-only hunks; no result data changes. Page content: NRA division, Single Query Track, SMT-COMP 2019. Par4 wins all five categories. Solved counts: Par4 86, Vampire 82, 2018-Z3 (non-competing) 82, Z3 (non-competing) 82, 2018-Vampire (non-competing) 74, CVC4 62, UltimateEliminator+MathSAT-5.5.4 1, UltimateEliminator+Yices-2.6.1 0.]

diff --git a/archive/2019/results/qf-abv-challenge-incremental.html b/archive/2019/results/qf-abv-challenge-incremental.html
index 426366eb..e69bea71 100644
--- a/archive/2019/results/qf-abv-challenge-incremental.html
+++ b/archive/2019/results/qf-abv-challenge-incremental.html

    [Whitespace/markup-only hunks; no result data changes. Page content: QF_ABV division, Challenge Track (incremental), SMT-COMP 2019. Winner (Parallel Performance): Boolector (incremental), ahead of the non-competing 2018-Boolector (incremental), Yices 2.6.2 Incremental, the non-competing Z3, and CVC4-inc.]

diff --git a/archive/2019/results/qf-abv-challenge-non-incremental.html b/archive/2019/results/qf-abv-challenge-non-incremental.html
index 894ed451..eb975029 100644
--- a/archive/2019/results/qf-abv-challenge-non-incremental.html
+++ b/archive/2019/results/qf-abv-challenge-non-incremental.html

    [Whitespace/markup-only hunks; no result data changes. Page content: QF_ABV division, Challenge Track (non-incremental), SMT-COMP 2019. Winners: Boolector (Sequential and Parallel Performance) and Poolector (24s Performance); one category shows no winner (—). Solved counts: Boolector, Poolector and Yices 2.6.2 7 each, Z3 (non-competing) 5, 2018-Boolector (non-competing) 1, CVC4 0.]

diff --git a/archive/2019/results/qf-abv-incremental.html b/archive/2019/results/qf-abv-incremental.html
index 1724860e..0b91765f 100644
--- a/archive/2019/results/qf-abv-incremental.html
+++ b/archive/2019/results/qf-abv-incremental.html

    [Whitespace/markup-only hunks; no result data changes. Page content: QF_ABV division, Incremental Track, SMT-COMP 2019. Winner (Parallel Performance): Yices 2.6.2 Incremental; the raw table is headed by the non-competing 2018-Boolector (incremental), followed by Yices 2.6.2 Incremental, Boolector (incremental), CVC4-inc and the non-competing Z3.]

diff --git a/archive/2019/results/qf-abv-single-query.html b/archive/2019/results/qf-abv-single-query.html
index 1e4b1903..2927fb4e 100644
--- a/archive/2019/results/qf-abv-single-query.html
+++ b/archive/2019/results/qf-abv-single-query.html

    [Whitespace/markup-only hunks; no result data changes. Page content: QF_ABV division, Single Query Track, SMT-COMP 2019. Winners: Boolector (Sequential), Par4 (Parallel), Poolector (SAT), Par4 (UNSAT), Par4 (24s). Solved counts: Boolector 7520, Par4 7518, 2018-Boolector (non-competing) 7515, Yices 2.6.2 7512, Poolector 7512, CVC4 7458, Z3 (non-competing) 7455.]

diff --git a/archive/2019/results/qf-abv-unsat-core.html b/archive/2019/results/qf-abv-unsat-core.html
index bb2b7597..600c7dd6 100644
--- a/archive/2019/results/qf-abv-unsat-core.html
+++ b/archive/2019/results/qf-abv-unsat-core.html

    [Whitespace/markup-only hunks; no result data changes. Page content: QF_ABV division, Unsat Core Track, SMT-COMP 2019. Winner of both Sequential and Parallel Performance: Yices 2.6.2. Scores: Z3 (non-competing) 189583, 2018-Yices (unsat core, non-competing) 187918, Yices 2.6.2 187871, CVC4-uc 173705.]

diff --git a/archive/2019/results/qf-alia-incremental.html b/archive/2019/results/qf-alia-incremental.html
index e0370188..cb7ca798 100644
--- a/archive/2019/results/qf-alia-incremental.html
+++ b/archive/2019/results/qf-alia-incremental.html

    [Whitespace/markup-only hunks; no result data changes. Page content: QF_ALIA division, Incremental Track, SMT-COMP 2019. Winner (Parallel Performance): SMTInterpol, ahead of Yices 2.6.2 Incremental and CVC4-inc; the top-ranked 2018-Z3 (incremental) and Z3 entries are non-competing.]

diff --git a/archive/2019/results/qf-alia-single-query.html b/archive/2019/results/qf-alia-single-query.html
index c494a86e..ab9eadbb 100644
--- a/archive/2019/results/qf-alia-single-query.html
+++ b/archive/2019/results/qf-alia-single-query.html

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    QF_ALIA (Single Query Track)

    Competition results for the QF_ALIA - + division - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    QF_ALIA (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) Yices 2.6.2Yices 2.6.2Yices 2.6.2 - - + + Yices 2.6.2 - - + + Yices 2.6.2 - + @@ -131,7 +131,7 @@

    QF_ALIA (Single Query Track)

    - + Yices 2.6.2 0 139 @@ -142,7 +142,7 @@

    QF_ALIA (Single Query Track)

    - + 2018-Yicesn 0 139 @@ -153,7 +153,7 @@

    QF_ALIA (Single Query Track)

    - + SMTInterpol 0 139 @@ -164,7 +164,7 @@

    QF_ALIA (Single Query Track)

    - + CVC4 0 138 @@ -175,7 +175,7 @@

    QF_ALIA (Single Query Track)

    - + Z3n 0 134 @@ -186,7 +186,7 @@

    QF_ALIA (Single Query Track)

    - + Alt-Ergo 0 70 @@ -197,7 +197,7 @@

    QF_ALIA (Single Query Track)

    - + veriT 0 16 @@ -219,7 +219,7 @@

    QF_ALIA (Single Query Track)

    - + Yices 2.6.2 0 13957.78457.948139598000 @@ -228,7 +228,7 @@

    QF_ALIA (Single Query Track)

    - + 2018-Yicesn 0 13961.48161.647139598000 @@ -237,7 +237,7 @@

    QF_ALIA (Single Query Track)

    - + SMTInterpol 0 139849.261413.676139598000 @@ -246,7 +246,7 @@

    QF_ALIA (Single Query Track)

    - + CVC4 0 1385921.1545922.007138597911 @@ -255,7 +255,7 @@

    QF_ALIA (Single Query Track)

    - + Z3n 0 13413293.5713289.869134548055 @@ -264,7 +264,7 @@

    QF_ALIA (Single Query Track)

    - + Alt-Ergo 0 71154452.518129673.616710716849 @@ -273,7 +273,7 @@

    QF_ALIA (Single Query Track)

    - + veriT 0 167.6457.606160161230 @@ -293,7 +293,7 @@

    QF_ALIA (Single Query Track)

    - + Yices 2.6.2 0 5951.41651.4259590800 @@ -302,7 +302,7 @@

    QF_ALIA (Single Query Track)

    - + 2018-Yicesn 0 5954.83154.84359590800 @@ -311,7 +311,7 @@

    QF_ALIA (Single Query Track)

    - + SMTInterpol 0 59450.646179.27859590800 @@ -320,7 +320,7 @@

    QF_ALIA (Single Query Track)

    - + CVC4 0 591940.4081941.10859590801 @@ -329,7 +329,7 @@

    QF_ALIA (Single Query Track)

    - + Z3n 0 5413203.6713199.96854540855 @@ -338,7 +338,7 @@

    QF_ALIA (Single Query Track)

    - + veriT 0 03.7533.7370001390 @@ -347,7 +347,7 @@

    QF_ALIA (Single Query Track)

    - + Alt-Ergo 0 0121389.831105180.36700013949 @@ -367,7 +367,7 @@

    QF_ALIA (Single Query Track)

    - + Yices 2.6.2 0 806.3676.52880080590 @@ -376,7 +376,7 @@

    QF_ALIA (Single Query Track)

    - + 2018-Yicesn 0 806.656.80580080590 @@ -385,7 +385,7 @@

    QF_ALIA (Single Query Track)

    - + Z3n 0 8089.989.90180080595 @@ -394,7 +394,7 @@

    QF_ALIA (Single Query Track)

    - + SMTInterpol 0 80398.615234.39980080590 @@ -403,7 +403,7 @@

    QF_ALIA (Single Query Track)

    - + CVC4 0 793980.7463980.89979079601 @@ -412,7 +412,7 @@

    QF_ALIA (Single Query Track)

    - + Alt-Ergo 0 7133062.68724493.249710716849 @@ -421,7 +421,7 @@

    QF_ALIA (Single Query Track)

    - + veriT 0 163.8923.87160161230 @@ -441,7 +441,7 @@

    QF_ALIA (Single Query Track)

    - + Yices 2.6.2 0 13957.78457.948139598000 @@ -450,7 +450,7 @@

    QF_ALIA (Single Query Track)

    - + 2018-Yicesn 0 13961.48161.647139598000 @@ -459,7 +459,7 @@

    QF_ALIA (Single Query Track)

    - + SMTInterpol 0 136767.209367.851136597733 @@ -468,7 +468,7 @@

    QF_ALIA (Single Query Track)

    - + Z3n 0 118666.767663.00711839792121 @@ -477,7 +477,7 @@

    QF_ALIA (Single Query Track)

    - + CVC4 0 116634.852634.81911645712323 @@ -486,7 +486,7 @@

    QF_ALIA (Single Query Track)

    - + Alt-Ergo 0 662784.921960.728660667369 @@ -495,7 +495,7 @@

    QF_ALIA (Single Query Track)

    - + veriT 0 167.6457.606160161230 @@ -519,7 +519,6 @@

    QF_ALIA (Single Query Track)

diff --git a/archive/2019/results/qf-alia-unsat-core.html b/archive/2019/results/qf-alia-unsat-core.html
index 5540ac3e..501cb649 100644
--- a/archive/2019/results/qf-alia-unsat-core.html
+++ b/archive/2019/results/qf-alia-unsat-core.html
[Markup-only hunks; the rendered QF_ALIA (Unsat Core Track) results are
unchanged. Winner (Sequential and Parallel Performance): Yices 2.6.2.
Ranking: 2018-Z3 (unsat core)ⁿ (3117), Z3ⁿ (3117), Yices 2.6.2 (2978),
SMTInterpol (2893), CVC4-uc (2718).]
diff --git a/archive/2019/results/qf-ania-incremental.html b/archive/2019/results/qf-ania-incremental.html
index bad8b1a8..6ced69ed 100644
--- a/archive/2019/results/qf-ania-incremental.html
+++ b/archive/2019/results/qf-ania-incremental.html
[Markup-only hunks; the rendered QF_ANIA (Incremental Track) results are
unchanged. Winner (Parallel Performance): CVC4-inc. Solvers:
2018-Z3 (incremental)ⁿ, Z3ⁿ, CVC4-inc, MathSAT-default, MathSAT-na-ext.]
diff --git a/archive/2019/results/qf-ania-single-query.html b/archive/2019/results/qf-ania-single-query.html
index 7173d5aa..8d547674 100644
--- a/archive/2019/results/qf-ania-single-query.html
+++ b/archive/2019/results/qf-ania-single-query.html
[Markup-only hunks; the rendered QF_ANIA (Single Query Track) results are
unchanged. Winners -- Sequential and Parallel: CVC4; SAT: none (--);
UNSAT: CVC4; 24s: MathSAT-na-ext. Ranking: CVC4 (7), MathSAT-na-ext (7),
MathSAT-default (7), 2018-Z3ⁿ (6), Z3ⁿ (6), Alt-Ergo (5).]
diff --git a/archive/2019/results/qf-ania-unsat-core.html b/archive/2019/results/qf-ania-unsat-core.html
index 799df4a2..38251847 100644
--- a/archive/2019/results/qf-ania-unsat-core.html
+++ b/archive/2019/results/qf-ania-unsat-core.html
[Markup-only hunks; the rendered QF_ANIA (Unsat Core Track) results are
unchanged. Winner (Sequential and Parallel Performance): CVC4-uc.
Ranking: 2018-CVC4 (unsat core)ⁿ (58897), CVC4-uc (58889),
MathSAT-default (44932), MathSAT-na-ext (44049), Z3ⁿ (16037).]
diff --git a/archive/2019/results/qf-aufbv-challenge-incremental.html b/archive/2019/results/qf-aufbv-challenge-incremental.html
index d54e6c59..21418bb9 100644
--- a/archive/2019/results/qf-aufbv-challenge-incremental.html
+++ b/archive/2019/results/qf-aufbv-challenge-incremental.html
[Markup-only hunks; the rendered QF_AUFBV (Challenge Track (incremental))
results are unchanged. Winner (Parallel Performance): Yices 2.6.2 Incremental.
Solvers: 2018-Yices (incremental)ⁿ, Yices 2.6.2 Incremental, Z3ⁿ, CVC4-inc,
Boolector (incremental).]
diff --git a/archive/2019/results/qf-aufbv-challenge-non-incremental.html b/archive/2019/results/qf-aufbv-challenge-non-incremental.html
index 3b0fd0bf..7787abe2 100644
--- a/archive/2019/results/qf-aufbv-challenge-non-incremental.html
+++ b/archive/2019/results/qf-aufbv-challenge-non-incremental.html
[Markup-only hunks; the rendered QF_AUFBV (Challenge Track (non-incremental))
results are unchanged. Winners -- Sequential, Parallel and SAT: Yices 2.6.2;
UNSAT: Poolector; 24s: Yices 2.6.2. Ranking: Yices 2.6.2 (10), Z3ⁿ (8),
Boolector (5), Poolector (4), 2018-CVC4ⁿ (3), CVC4 (3).]
diff --git a/archive/2019/results/qf-aufbv-incremental.html b/archive/2019/results/qf-aufbv-incremental.html
index ef0dbd85..b94968f4 100644
--- a/archive/2019/results/qf-aufbv-incremental.html
+++ b/archive/2019/results/qf-aufbv-incremental.html
[Markup-only hunks; the rendered QF_AUFBV (Incremental Track) results are
unchanged. Winner (Parallel Performance): Yices 2.6.2 Incremental. Solvers:
Yices 2.6.2 Incremental, 2018-Yices (incremental)ⁿ, Z3ⁿ, CVC4-inc,
Boolector (incremental).]
diff --git a/archive/2019/results/qf-aufbv-single-query.html b/archive/2019/results/qf-aufbv-single-query.html
index c1397790..acaad660 100644
--- a/archive/2019/results/qf-aufbv-single-query.html
+++ b/archive/2019/results/qf-aufbv-single-query.html
[Markup-only hunks; the rendered QF_AUFBV (Single Query Track) results are
unchanged. Winners (Sequential, Parallel, SAT, UNSAT and 24s Performance):
Yices 2.6.2 in all five categories. Ranking: Yices 2.6.2 (38), Z3ⁿ (38),
Boolector (32), Poolector (32), 2018-CVC4ⁿ (32), CVC4 (32).]
diff --git a/archive/2019/results/qf-aufbv-unsat-core.html b/archive/2019/results/qf-aufbv-unsat-core.html
index 13df49d0..da574af3 100644
--- a/archive/2019/results/qf-aufbv-unsat-core.html
+++ b/archive/2019/results/qf-aufbv-unsat-core.html
[Markup-only hunks; the rendered QF_AUFBV (Unsat Core Track) results are
unchanged. Winner (Sequential and Parallel Performance): Yices 2.6.2.
Ranking: Yices 2.6.2 (18216), Z3ⁿ (16808), 2018-MathSAT (unsat core)ⁿ (16698),
CVC4-uc (15227).]
diff --git a/archive/2019/results/qf-aufbvnia-incremental.html b/archive/2019/results/qf-aufbvnia-incremental.html
index 694777b8..3539a07b 100644
--- a/archive/2019/results/qf-aufbvnia-incremental.html
+++ b/archive/2019/results/qf-aufbvnia-incremental.html
[Markup-only hunks; the rendered QF_AUFBVNIA (Incremental Track) results are
unchanged. Winner (Parallel Performance): MathSAT-na-ext. Solvers:
MathSAT-na-ext, MathSAT-default, CVC4-inc, Z3ⁿ.]
diff --git a/archive/2019/results/qf-auflia-incremental.html b/archive/2019/results/qf-auflia-incremental.html
index a6b8eaa4..5579a7f0 100644
--- a/archive/2019/results/qf-auflia-incremental.html
+++ b/archive/2019/results/qf-auflia-incremental.html
[Markup-only hunks; the rendered QF_AUFLIA (Incremental Track) results are
unchanged. Winner (Parallel Performance): Yices 2.6.2 Incremental. Solvers:
2018-Yices (incremental)ⁿ, Yices 2.6.2 Incremental, Z3ⁿ, SMTInterpol,
CVC4-inc.]
diff --git a/archive/2019/results/qf-auflia-single-query.html b/archive/2019/results/qf-auflia-single-query.html
index 589d7170..a481f9ca 100644
--- a/archive/2019/results/qf-auflia-single-query.html
+++ b/archive/2019/results/qf-auflia-single-query.html
[Markup-only hunks; the rendered QF_AUFLIA (Single Query Track) results are
unchanged. Winners (Sequential, Parallel, SAT, UNSAT and 24s Performance):
Yices 2.6.2 in all five categories. Ranking: Yices 2.6.2 (651),
2018-Yicesⁿ (651), Z3ⁿ (651), CVC4 (651), SMTInterpol (651), Alt-Ergo (356),
veriT (135).]
diff --git a/archive/2019/results/qf-auflia-unsat-core.html b/archive/2019/results/qf-auflia-unsat-core.html
index dc814265..73b60aa2 100644
--- a/archive/2019/results/qf-auflia-unsat-core.html
+++ b/archive/2019/results/qf-auflia-unsat-core.html
[Markup-only hunks; the rendered QF_AUFLIA (Unsat Core Track) results are
unchanged. Winner (Sequential and Parallel Performance): CVC4-uc.
Ranking: 2018-CVC4 (unsat core)ⁿ (23094), CVC4-uc (23094), Z3ⁿ (22543),
Yices 2.6.2 (13585), SMTInterpol (1315).]
diff --git a/archive/2019/results/qf-aufnia-single-query.html b/archive/2019/results/qf-aufnia-single-query.html
index 85144c4b..89a3a81e 100644
--- a/archive/2019/results/qf-aufnia-single-query.html
+++ b/archive/2019/results/qf-aufnia-single-query.html
[Markup-only hunks; the rendered QF_AUFNIA (Single Query Track) results are
unchanged. Winners -- Sequential, Parallel and SAT: MathSAT-default;
UNSAT: MathSAT-na-ext; 24s: MathSAT-default. Ranking: MathSAT-default (9),
MathSAT-na-ext (9), 2018-Z3ⁿ (9), CVC4 (9), Z3ⁿ (9), Alt-Ergo (6).]
diff --git a/archive/2019/results/qf-aufnia-unsat-core.html b/archive/2019/results/qf-aufnia-unsat-core.html
index a67b6358..d903bfb7 100644
--- a/archive/2019/results/qf-aufnia-unsat-core.html
+++ b/archive/2019/results/qf-aufnia-unsat-core.html
[Markup-only hunks; the rendered QF_AUFNIA (Unsat Core Track) results are
unchanged. Winner (Sequential and Parallel Performance): MathSAT-default.
Ranking: 2018-Z3 (unsat core)ⁿ (20336), Z3ⁿ (20300), MathSAT-default (20251),
CVC4-uc (20226), MathSAT-na-ext (20215).]
diff --git a/archive/2019/results/qf-ax-single-query.html b/archive/2019/results/qf-ax-single-query.html
index 6382a38b..15ff1983 100644
--- a/archive/2019/results/qf-ax-single-query.html
+++ b/archive/2019/results/qf-ax-single-query.html
[Markup-only hunks; the rendered QF_AX (Single Query Track) results are
unchanged. Winners (Sequential, Parallel, SAT, UNSAT and 24s Performance):
Yices 2.6.2 in all five categories. Ranking: Yices 2.6.2 (300),
2018-Yicesⁿ (300), Z3ⁿ (300), CVC4 (300), SMTInterpol (300), Alt-Ergo (138).]
diff --git a/archive/2019/results/qf-ax-unsat-core.html b/archive/2019/results/qf-ax-unsat-core.html
index d800f1bf..d4f2b6f9 100644
--- a/archive/2019/results/qf-ax-unsat-core.html
+++ b/archive/2019/results/qf-ax-unsat-core.html
[Markup-only hunks; the rendered QF_AX (Unsat Core Track) results are
unchanged. Winner (Sequential and Parallel Performance): Yices 2.6.2.
Ranking: 2018-Yices (unsat core)ⁿ (38645), Yices 2.6.2 (38645), Z3ⁿ (37978),
CVC4-uc (27032), SMTInterpol (471).]
diff --git a/archive/2019/results/qf-bv-challenge-incremental.html b/archive/2019/results/qf-bv-challenge-incremental.html
index b652871d..d4089a42 100644
--- a/archive/2019/results/qf-bv-challenge-incremental.html
+++ b/archive/2019/results/qf-bv-challenge-incremental.html
[Markup-only hunks; the rendered QF_BV (Challenge Track (incremental)) results
are unchanged. Winner (Parallel Performance): Boolector (incremental).
Solvers: Boolector (incremental), 2018-MathSAT (incremental)ⁿ,
Yices 2.6.2 Incremental, STP-incremental, CVC4-inc-fixedⁿ, Z3ⁿ,
Minkeyrink Solver, Minkeyrink Solver MT, STP-mt, CVC4-inc.]
diff --git a/archive/2019/results/qf-bv-challenge-non-incremental.html b/archive/2019/results/qf-bv-challenge-non-incremental.html
index 9ae1fcbd..47e2493d 100644
--- a/archive/2019/results/qf-bv-challenge-non-incremental.html
+++ b/archive/2019/results/qf-bv-challenge-non-incremental.html
[Markup-only hunks; the rendered QF_BV (Challenge Track (non-incremental))
results are unchanged. Winners -- Sequential: Boolector; Parallel and SAT:
Minkeyrink Solver MT; UNSAT: none (--); 24s: Minkeyrink Solver MT.
Ranking -- score 7: 2018-Boolectorⁿ, Boolector, Yices 2.6.2 CaDiCal,
Minkeyrink Solver MT, Yices 2.6.2 New Bvsolver, Minkeyrink Solver,
2018-Minkeyrink MTⁿ, Poolector, Yices 2.6.2 Cryptominisat, Yices 2.6.2, Z3ⁿ,
STP-minisat, STP; score 6: STP-mergesat-fixedⁿ, STP-portfolio-fixedⁿ, CVC4,
STP-mt; score 0: STP-mergesat, STP-riss, STP-portfolio.]
diff --git a/archive/2019/results/qf-bv-incremental.html b/archive/2019/results/qf-bv-incremental.html
index 46be632c..63736aaa 100644
--- a/archive/2019/results/qf-bv-incremental.html
+++ b/archive/2019/results/qf-bv-incremental.html
[markup-only reformatting hunks; the rendered QF_BV (Incremental Track) results page is unchanged — parallel-performance winner Yices 2.6.2 Incremental; standings: Yices 2.6.2 Incremental, Minkeyrink Solver MT, Minkeyrink Solver, 2018-MathSAT (incremental), STP-incremental, Boolector (incremental), CVC4-inc-fixed, Z3, STP-mt, Boolector-ReasonLS, CVC4-inc]
diff --git a/archive/2019/results/qf-bv-model-validation.html b/archive/2019/results/qf-bv-model-validation.html
index f01d836e..11366dec 100644
--- a/archive/2019/results/qf-bv-model-validation.html
+++ b/archive/2019/results/qf-bv-model-validation.html
[markup-only reformatting hunks; the rendered QF_BV (Model Validation Track (experimental)) results page is unchanged — standings: Boolector, CVC4-mv, Yices 2.6.2 CaDiCal/SMT-LIB2 Models, Minkeyrink Solver, Yices 2.6.2 Model Validation, Yices 2.6.2 New Bvsolver with SMT2 Models, Minkeyrink Solver MT, Yices 2.6.2 Cryptominisat/SMT-LIB2 Models, STP-incremental, STP-mt]
diff --git a/archive/2019/results/qf-bv-single-query.html b/archive/2019/results/qf-bv-single-query.html
index 3e71a134..1d8d6f2a 100644
--- a/archive/2019/results/qf-bv-single-query.html
+++ b/archive/2019/results/qf-bv-single-query.html
[markup-only reformatting hunks; the rendered QF_BV (Single Query Track) results page is unchanged — winners: Boolector (sequential), Poolector (parallel, SAT, and UNSAT), Par4 (24-second); standings cover Boolector, Poolector, Par4, the Yices 2.6.2 configurations, Minkeyrink Solver and Minkeyrink Solver MT, CVC4, STP and its variants, Z3, Boolector-ReasonLS, and the 2018 non-competing entries; per-row scores and times are fused in the extraction]
diff --git a/archive/2019/results/qf-bv-unsat-core.html b/archive/2019/results/qf-bv-unsat-core.html
index 3e3e5d05..fda00866 100644
--- a/archive/2019/results/qf-bv-unsat-core.html
+++ b/archive/2019/results/qf-bv-unsat-core.html
[markup-only reformatting hunks; the rendered QF_BV (Unsat Core Track) results page is unchanged — sequential and parallel winner Yices 2.6.2; standings: 2018-Yices (unsat core), Yices 2.6.2, CVC4-uc, Z3]
diff --git a/archive/2019/results/qf-bvfp-single-query.html b/archive/2019/results/qf-bvfp-single-query.html
index 1cf9acb6..ab45ff64 100644
--- a/archive/2019/results/qf-bvfp-single-query.html
+++ b/archive/2019/results/qf-bvfp-single-query.html
[markup-only reformatting hunks; the rendered QF_BVFP (Single Query Track) results page is unchanged — Par4 wins all five performance categories; standings: Par4, 2018-CVC4, CVC4, Z3]
diff --git a/archive/2019/results/qf-dt-single-query.html b/archive/2019/results/qf-dt-single-query.html
index 810b5693..a7a73639 100644
--- a/archive/2019/results/qf-dt-single-query.html
+++ b/archive/2019/results/qf-dt-single-query.html
[markup-only reformatting hunks; the rendered QF_DT (Single Query Track) results page is unchanged — CVC4 wins all five performance categories; standings: 2018-CVC4, CVC4, Alt-Ergo]
diff --git a/archive/2019/results/qf-fp-single-query.html b/archive/2019/results/qf-fp-single-query.html
index 71926077..bc75fad6 100644
--- a/archive/2019/results/qf-fp-single-query.html
+++ b/archive/2019/results/qf-fp-single-query.html
[markup-only reformatting hunks; the rendered QF_FP (Single Query Track) results page is unchanged — winners: Par4 (sequential, parallel, SAT, and UNSAT), COLIBRI (24-second); standings: Par4, COLIBRI, CVC4, 2018-COLIBRI, Z3]
diff --git a/archive/2019/results/qf-idl-single-query.html b/archive/2019/results/qf-idl-single-query.html
index c2d9b5f2..8ae21be7 100644
--- a/archive/2019/results/qf-idl-single-query.html
+++ b/archive/2019/results/qf-idl-single-query.html
[markup-only reformatting hunks; the rendered QF_IDL (Single Query Track) results page is unchanged — winners: Yices 2.6.2 (sequential), Par4 (parallel, SAT, UNSAT, and 24-second); standings: Z3, 2018-Yices, Yices 2.6.2, Par4, CVC4, veriT, SMTInterpol, ProB]
diff --git a/archive/2019/results/qf-lia-incremental.html b/archive/2019/results/qf-lia-incremental.html
index a2c544de..517b5cd7 100644
--- a/archive/2019/results/qf-lia-incremental.html
+++ b/archive/2019/results/qf-lia-incremental.html
[markup-only reformatting hunks; the rendered QF_LIA (Incremental Track) results page is unchanged — parallel-performance winner Yices 2.6.2 Incremental; standings: 2018-Yices (incremental), Yices 2.6.2 Incremental, Z3, SMTInterpol, CVC4-inc-fixed, CVC4-inc]
diff --git a/archive/2019/results/qf-lia-single-query.html b/archive/2019/results/qf-lia-single-query.html
index 42d2f39f..d988d139 100644
--- a/archive/2019/results/qf-lia-single-query.html
+++ b/archive/2019/results/qf-lia-single-query.html
[markup-only reformatting hunks; the rendered QF_LIA (Single Query Track) results page is unchanged — Par4 wins all five performance categories; standings: Par4, SPASS-SATT, 2018-SPASS-SATT, Z3, CVC4, Ctrl-Ergo, Yices 2.6.2, SMTInterpol, CVC4-SymBreak, veriT, ProB]
diff --git a/archive/2019/results/qf-lia-unsat-core.html b/archive/2019/results/qf-lia-unsat-core.html
index 07298ea9..7cd098e2 100644
--- a/archive/2019/results/qf-lia-unsat-core.html
+++ b/archive/2019/results/qf-lia-unsat-core.html
[markup-only reformatting hunks; the rendered QF_LIA (Unsat Core Track) results page is unchanged — sequential and parallel winner Yices 2.6.2; standings: Yices 2.6.2, Z3, 2018-SMTInterpol (unsat core), SMTInterpol, CVC4-uc]
diff --git a/archive/2019/results/qf-lira-single-query.html b/archive/2019/results/qf-lira-single-query.html
index f8e234ad..0714a4ec 100644
--- a/archive/2019/results/qf-lira-single-query.html
+++ b/archive/2019/results/qf-lira-single-query.html
[markup-only reformatting hunks; the rendered QF_LIRA (Single Query Track) results page is unchanged — winners: Par4 (sequential, parallel, and UNSAT), Yices 2.6.2 (SAT and 24-second); standings: Par4, 2018-Z3, Yices 2.6.2, Z3, CVC4, SMTInterpol]
diff --git a/archive/2019/results/qf-lira-unsat-core.html b/archive/2019/results/qf-lira-unsat-core.html
index b9cfefb0..8f5881b7 100644
--- a/archive/2019/results/qf-lira-unsat-core.html
+++ b/archive/2019/results/qf-lira-unsat-core.html
[markup-only reformatting hunks; the rendered QF_LIRA (Unsat Core Track) results page is unchanged — no winner declared (every entrant scored 0); entrants: Z3, 2018-Z3 (unsat core), Yices 2.6.2, CVC4-uc, SMTInterpol]
diff --git a/archive/2019/results/qf-lra-incremental.html b/archive/2019/results/qf-lra-incremental.html
index 9fe021a0..cc8d11de 100644
--- a/archive/2019/results/qf-lra-incremental.html
+++ b/archive/2019/results/qf-lra-incremental.html
[markup-only reformatting hunks; the rendered QF_LRA (Incremental Track) results page is unchanged — parallel-performance winner Yices 2.6.2 Incremental; standings: 2018-MathSAT (incremental), Yices 2.6.2 Incremental, CVC4-inc, SMTInterpol, Z3]
diff --git a/archive/2019/results/qf-lra-single-query.html b/archive/2019/results/qf-lra-single-query.html
index 39d43867..cee3e334 100644
--- a/archive/2019/results/qf-lra-single-query.html
+++ b/archive/2019/results/qf-lra-single-query.html
[markup-only reformatting hunks; the rendered QF_LRA (Single Query Track) results page is unchanged — winners: SPASS-SATT (sequential), Par4 (parallel, SAT, and UNSAT), Yices 2.6.2 (24-second); standings: SPASS-SATT, 2018-CVC4, Par4, Yices 2.6.2, CVC4, CVC4-SymBreak, veriT, SMTInterpol, Z3, OpenSMT2, Ctrl-Ergo]
    diff --git a/archive/2019/results/qf-lra-unsat-core.html b/archive/2019/results/qf-lra-unsat-core.html
    index 3cbcac47..f9791586 100644
    --- a/archive/2019/results/qf-lra-unsat-core.html
    +++ b/archive/2019/results/qf-lra-unsat-core.html
    [Markup/whitespace-only hunks (each removed line and its replacement render
    identically); the visible QF_LRA (Unsat Core Track) results are unchanged:
    winner Yices 2.6.2 (sequential and parallel), followed by CVC4-uc,
    2018-SMTInterpol (unsat core)n, Z3n, and SMTInterpol.]
    diff --git a/archive/2019/results/qf-nia-incremental.html b/archive/2019/results/qf-nia-incremental.html
    index d1e327d8..b0759617 100644
    --- a/archive/2019/results/qf-nia-incremental.html
    +++ b/archive/2019/results/qf-nia-incremental.html
    [Markup/whitespace-only hunks; the visible QF_NIA (Incremental Track) results
    are unchanged: parallel-performance winner MathSAT-default, followed by
    MathSAT-na-ext, 2018-CVC4 (incremental)n, CVC4-inc, Z3n, and
    Yices 2.6.2 Incremental.]
    diff --git a/archive/2019/results/qf-nia-single-query.html b/archive/2019/results/qf-nia-single-query.html
    index 4e8e3c98..3d6bc144 100644
    --- a/archive/2019/results/qf-nia-single-query.html
    +++ b/archive/2019/results/qf-nia-single-query.html
    [Markup/whitespace-only hunks; the visible QF_NIA (Single Query Track)
    results are unchanged: Par4 wins all five categories (sequential, parallel,
    SAT, UNSAT, and 24-second performance), ahead of CVC4, CVC4-SymBreakn,
    Yices 2.6.2, 2018-CVC4n, Z3n, AProVE, SMT-RAT, ProB, MathSAT-default, and
    MathSAT-na-ext.]
    diff --git a/archive/2019/results/qf-nia-unsat-core.html b/archive/2019/results/qf-nia-unsat-core.html
    index ba43ddb1..5ce8818f 100644
    --- a/archive/2019/results/qf-nia-unsat-core.html
    +++ b/archive/2019/results/qf-nia-unsat-core.html
    [Markup/whitespace-only hunks; the visible QF_NIA (Unsat Core Track) results
    are unchanged: winner MathSAT-default (sequential and parallel), followed by
    MathSAT-na-ext, 2018-Z3 (unsat core)n, Z3n, and CVC4-uc.]
    diff --git a/archive/2019/results/qf-nira-single-query.html b/archive/2019/results/qf-nira-single-query.html
    index 3a4ba6b7..2e84457e 100644
    --- a/archive/2019/results/qf-nira-single-query.html
    +++ b/archive/2019/results/qf-nira-single-query.html
    [Markup/whitespace-only hunks; the visible QF_NIRA (Single Query Track)
    results are unchanged: SMT-RAT wins the sequential, parallel, and UNSAT
    categories, with no winner declared for SAT or 24-second performance;
    entrants are 2018-SMTRAT-Ratn, SMT-RAT, Z3n, CVC4, MathSAT-default,
    MathSAT-na-ext, and Yices 2.6.2.]
    diff --git a/archive/2019/results/qf-nira-unsat-core.html b/archive/2019/results/qf-nira-unsat-core.html
    index 6d4784f5..b485e676 100644
    --- a/archive/2019/results/qf-nira-unsat-core.html
    +++ b/archive/2019/results/qf-nira-unsat-core.html
    [Markup/whitespace-only hunks; the visible QF_NIRA (Unsat Core Track) results
    are unchanged: winner MathSAT-na-ext (sequential and parallel), followed by
    MathSAT-default, Z3n, 2018-Z3 (unsat core)n, and CVC4-uc.]
    diff --git a/archive/2019/results/qf-nra-single-query.html b/archive/2019/results/qf-nra-single-query.html
    index 9a9e8cbc..646e6996 100644
    --- a/archive/2019/results/qf-nra-single-query.html
    +++ b/archive/2019/results/qf-nra-single-query.html
    [Markup/whitespace-only hunks; the visible QF_NRA (Single Query Track)
    results are unchanged: Par4 wins all five categories, ahead of Yices 2.6.2,
    Z3n, 2018-Z3n, SMTRAT-MCSAT, veriT+raSAT+Redlog, CVC4, CVC4-SymBreakn,
    MathSAT-default, SMT-RAT, and MathSAT-na-ext.]
    diff --git a/archive/2019/results/qf-nra-unsat-core.html b/archive/2019/results/qf-nra-unsat-core.html
    index e42a9afc..e46a85c2 100644
    --- a/archive/2019/results/qf-nra-unsat-core.html
    +++ b/archive/2019/results/qf-nra-unsat-core.html
    [Markup/whitespace-only hunks; the visible QF_NRA (Unsat Core Track) results
    are unchanged: winner CVC4-uc (sequential and parallel); ranked entries are
    2018-CVC4 (unsat core)n, CVC4-uc, MathSAT-default, MathSAT-na-ext, and Z3n.]
    diff --git a/archive/2019/results/qf-rdl-single-query.html b/archive/2019/results/qf-rdl-single-query.html
    index 7d0ab5a3..f7d8522b 100644
    --- a/archive/2019/results/qf-rdl-single-query.html
    +++ b/archive/2019/results/qf-rdl-single-query.html
    [Markup/whitespace-only hunks; the visible QF_RDL (Single Query Track)
    results are unchanged: Yices 2.6.2 wins all five categories, ahead of
    2018-Yicesn, CVC4, veriT, Z3n, and SMTInterpol.]
    diff --git a/archive/2019/results/qf-s-single-query.html b/archive/2019/results/qf-s-single-query.html
    index 45fccc8a..f3156c02 100644
    --- a/archive/2019/results/qf-s-single-query.html
    +++ b/archive/2019/results/qf-s-single-query.html
    [Markup/whitespace-only hunks; the visible QF_S (Single Query Track) results
    are unchanged: CVC4 is the only solver listed and tops every category.]
    diff --git a/archive/2019/results/qf-slia-single-query.html b/archive/2019/results/qf-slia-single-query.html
    index 508e5cfd..f3b31c17 100644
    --- a/archive/2019/results/qf-slia-single-query.html
    +++ b/archive/2019/results/qf-slia-single-query.html
    [Markup/whitespace-only hunks; the visible QF_SLIA (Single Query Track)
    results are unchanged: only CVC4 and 2018-CVC4n are listed, with CVC4 ahead
    in every category.]
    diff --git a/archive/2019/results/qf-uf-incremental.html b/archive/2019/results/qf-uf-incremental.html
    index 4f73147a..7fd98bbb 100644
    --- a/archive/2019/results/qf-uf-incremental.html
    +++ b/archive/2019/results/qf-uf-incremental.html
    [Markup/whitespace-only hunks; the visible QF_UF (Incremental Track) results
    are unchanged: parallel-performance winner Yices 2.6.2 Incremental, followed
    by Z3n, CVC4-inc, and SMTInterpol.]
    diff --git a/archive/2019/results/qf-uf-single-query.html b/archive/2019/results/qf-uf-single-query.html
    index d2b29bdf..e39a8b7c 100644
    --- a/archive/2019/results/qf-uf-single-query.html
    +++ b/archive/2019/results/qf-uf-single-query.html
    [Markup/whitespace-only hunks; the visible QF_UF (Single Query Track) results
    are unchanged: Yices 2.6.2 wins the sequential and SAT categories, Par4 the
    parallel, UNSAT, and 24-second categories; also listed are 2018-Yicesn,
    veriT, CVC4, OpenSMT2, Z3n, SMTInterpol, and Alt-Ergo.]
    diff --git a/archive/2019/results/qf-uf-unsat-core.html b/archive/2019/results/qf-uf-unsat-core.html
    index bc408948..9a3476ec 100644
    --- a/archive/2019/results/qf-uf-unsat-core.html
    +++ b/archive/2019/results/qf-uf-unsat-core.html
    [Markup/whitespace-only hunks; the visible QF_UF (Unsat Core Track) results
    are unchanged: winner Yices 2.6.2 (sequential and parallel), followed by
    SMTInterpol, Z3n, CVC4-uc, and 2018-CVC4 (unsat core)n.]
    diff --git a/archive/2019/results/qf-ufbv-incremental.html b/archive/2019/results/qf-ufbv-incremental.html
    index 8104c2a7..34f9e395 100644
    --- a/archive/2019/results/qf-ufbv-incremental.html
    +++ b/archive/2019/results/qf-ufbv-incremental.html
    [Markup/whitespace-only hunks; the visible QF_UFBV (Incremental Track)
    results are unchanged: parallel-performance winner Yices 2.6.2 Incremental;
    ranked entries are 2018-Boolector (incremental)n, Yices 2.6.2 Incremental,
    Boolector (incremental), Z3n, and CVC4-inc.]
    diff --git a/archive/2019/results/qf-ufbv-single-query.html b/archive/2019/results/qf-ufbv-single-query.html
    index b26e862b..4770a740 100644
    --- a/archive/2019/results/qf-ufbv-single-query.html
    +++ b/archive/2019/results/qf-ufbv-single-query.html
    [Markup/whitespace-only hunks; the visible QF_UFBV (Single Query Track)
    results are unchanged: Yices 2.6.2 wins all five categories, ahead of
    Boolector, 2018-Boolectorn, CVC4, Poolector, and Z3n.]
    diff --git a/archive/2019/results/qf-ufbv-unsat-core.html b/archive/2019/results/qf-ufbv-unsat-core.html
    index 0c1684a1..719071da 100644
    --- a/archive/2019/results/qf-ufbv-unsat-core.html
    +++ b/archive/2019/results/qf-ufbv-unsat-core.html
    [Markup/whitespace-only hunks; the visible QF_UFBV (Unsat Core Track) results
    are unchanged: winner Yices 2.6.2 (sequential and parallel); ranked entries
    are 2018-Z3 (unsat core)n, Z3n, Yices 2.6.2, and CVC4-uc.]
    diff --git a/archive/2019/results/qf-ufidl-single-query.html b/archive/2019/results/qf-ufidl-single-query.html
    index d20e5484..2d37b7c4 100644
    --- a/archive/2019/results/qf-ufidl-single-query.html
    +++ b/archive/2019/results/qf-ufidl-single-query.html
    [Markup/whitespace-only hunks; the visible QF_UFIDL (Single Query Track)
    results are unchanged: Yices 2.6.2 wins all five categories, ahead of
    2018-Yicesn, Z3n, SMTInterpol, veriT, and CVC4.]
    diff --git a/archive/2019/results/qf-ufidl-unsat-core.html b/archive/2019/results/qf-ufidl-unsat-core.html
    index 49cd19b0..2926285b 100644
    --- a/archive/2019/results/qf-ufidl-unsat-core.html
    +++ b/archive/2019/results/qf-ufidl-unsat-core.html
    [Markup/whitespace-only hunks; the visible QF_UFIDL (Unsat Core Track)
    results are unchanged: winner Yices 2.6.2 (sequential and parallel), with all
    five entrants (Yices 2.6.2, 2018-Yices (unsat core)n, CVC4-uc, Z3n,
    SMTInterpol) scoring 19.]
    diff --git a/archive/2019/results/qf-uflia-incremental.html b/archive/2019/results/qf-uflia-incremental.html
    index a85cd3b9..31351f9f 100644
    --- a/archive/2019/results/qf-uflia-incremental.html
    +++ b/archive/2019/results/qf-uflia-incremental.html
    [Markup/whitespace-only hunks; the visible QF_UFLIA (Incremental Track)
    results are unchanged: parallel-performance winner SMTInterpol; ranked
    entries are 2018-Z3 (incremental)n, Z3n, SMTInterpol, CVC4-inc, and
    Yices 2.6.2 Incremental.]
    diff --git a/archive/2019/results/qf-uflia-single-query.html b/archive/2019/results/qf-uflia-single-query.html
    index c6cd8638..982a2a75 100644
    --- a/archive/2019/results/qf-uflia-single-query.html
    +++ b/archive/2019/results/qf-uflia-single-query.html
    [Markup/whitespace-only hunks; the visible QF_UFLIA (Single Query Track)
    results are unchanged: Yices 2.6.2 wins all five categories; ranked entries
    are 2018-Yicesn, Yices 2.6.2, Z3n, CVC4, SMTInterpol, veriT, and Alt-Ergo.]

    QF_UFLIA (Single Query Track)

    - + Alt-Ergo 0 624737.9983620.51462062238123 @@ -519,7 +519,6 @@

    QF_UFLIA (Single Query Track)

- + -
diff --git a/archive/2019/results/qf-uflia-unsat-core.html b/archive/2019/results/qf-uflia-unsat-core.html
index 09c3fb77..4ecfca1b 100644
--- a/archive/2019/results/qf-uflia-unsat-core.html
+++ b/archive/2019/results/qf-uflia-unsat-core.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    QF_UFLIA (Unsat Core Track)

    Competition results for the QF_UFLIA - + division - + in the Unsat Core Track.

    @@ -104,8 +104,8 @@

    QF_UFLIA (Unsat Core Track)

    Sequential PerformanceParallel Performance Yices 2.6.2Yices 2.6.2 - - + + @@ -126,7 +126,7 @@

    QF_UFLIA (Unsat Core Track)

    - + Yices 2.6.2 0 21 @@ -137,7 +137,7 @@

    QF_UFLIA (Unsat Core Track)

    - + 2018-Z3 (unsat core)n 0 21 @@ -148,7 +148,7 @@

    QF_UFLIA (Unsat Core Track)

    - + Z3n 0 21 @@ -159,7 +159,7 @@

    QF_UFLIA (Unsat Core Track)

    - + SMTInterpol 0 21 @@ -170,7 +170,7 @@

    QF_UFLIA (Unsat Core Track)

    - + CVC4-uc 0 18 @@ -192,7 +192,7 @@

    QF_UFLIA (Unsat Core Track)

    - + Yices 2.6.2 0 210.0990.2320 @@ -201,7 +201,7 @@

    QF_UFLIA (Unsat Core Track)

    - + 2018-Z3 (unsat core)n 0 210.6680.6690 @@ -210,7 +210,7 @@

    QF_UFLIA (Unsat Core Track)

    - + Z3n 0 210.710.710 @@ -219,7 +219,7 @@

    QF_UFLIA (Unsat Core Track)

    - + SMTInterpol 0 215.8434.4870 @@ -228,7 +228,7 @@

    QF_UFLIA (Unsat Core Track)

    - + CVC4-uc 0 180.250.2760 @@ -252,7 +252,6 @@

    QF_UFLIA (Unsat Core Track)

- + -
diff --git a/archive/2019/results/qf-uflra-incremental.html b/archive/2019/results/qf-uflra-incremental.html
index 833da0ca..efc68c5e 100644
--- a/archive/2019/results/qf-uflra-incremental.html
+++ b/archive/2019/results/qf-uflra-incremental.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    QF_UFLRA (Incremental Track)

    Competition results for the QF_UFLRA - + division - + in the Incremental Track.

    @@ -104,8 +104,8 @@

    QF_UFLRA (Incremental Track)

    Parallel Performance Yices 2.6.2 Incremental - - + + @@ -124,7 +124,7 @@

    QF_UFLRA (Incremental Track)

    - + Z3n 0 835112199.0511937.68200 @@ -133,7 +133,7 @@

    QF_UFLRA (Incremental Track)

    - + 2018-Z3 (incremental)n 0 835115174.15214906.16900 @@ -142,7 +142,7 @@

    QF_UFLRA (Incremental Track)

    - + Yices 2.6.2 Incremental 0 834722895.64922654.76742 @@ -151,7 +151,7 @@

    QF_UFLRA (Incremental Track)

    - + CVC4-inc 0 834158141.83957724.599104 @@ -160,7 +160,7 @@

    QF_UFLRA (Incremental Track)

    - + SMTInterpol 0 833066045.19658093.398219 @@ -184,7 +184,6 @@

    QF_UFLRA (Incremental Track)

- + -
diff --git a/archive/2019/results/qf-uflra-single-query.html b/archive/2019/results/qf-uflra-single-query.html
index 6cd5343b..a17e1535 100644
--- a/archive/2019/results/qf-uflra-single-query.html
+++ b/archive/2019/results/qf-uflra-single-query.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    QF_UFLRA (Single Query Track)

    Competition results for the QF_UFLRA - + division - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    QF_UFLRA (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) SMTInterpolSMTInterpolSMTInterpol - - + + Yices 2.6.2 - - + + Yices 2.6.2 - + @@ -131,7 +131,7 @@

    QF_UFLRA (Single Query Track)

    - + SMTInterpol 0 541 @@ -142,7 +142,7 @@

    QF_UFLRA (Single Query Track)

    - + Yices 2.6.2 0 540 @@ -153,7 +153,7 @@

    QF_UFLRA (Single Query Track)

    - + 2018-Yicesn 0 540 @@ -164,7 +164,7 @@

    QF_UFLRA (Single Query Track)

    - + Z3n 0 540 @@ -175,7 +175,7 @@

    QF_UFLRA (Single Query Track)

    - + veriT 0 540 @@ -186,7 +186,7 @@

    QF_UFLRA (Single Query Track)

    - + CVC4 0 539 @@ -197,7 +197,7 @@

    QF_UFLRA (Single Query Track)

    - + Alt-Ergo 0 200 @@ -219,7 +219,7 @@

    QF_UFLRA (Single Query Track)

    - + SMTInterpol 0 5414649.2183214.41154133021100 @@ -228,7 +228,7 @@

    QF_UFLRA (Single Query Track)

    - + Yices 2.6.2 0 5402563.3692563.7854032921111 @@ -237,7 +237,7 @@

    QF_UFLRA (Single Query Track)

    - + 2018-Yicesn 0 5402567.5022567.70954032921111 @@ -246,7 +246,7 @@

    QF_UFLRA (Single Query Track)

    - + Z3n 0 5402687.562687.59854032921111 @@ -255,7 +255,7 @@

    QF_UFLRA (Single Query Track)

    - + veriT 0 5404614.1714614.10954032921111 @@ -264,7 +264,7 @@

    QF_UFLRA (Single Query Track)

    - + CVC4 0 5395657.4465657.61853932821122 @@ -273,7 +273,7 @@

    QF_UFLRA (Single Query Track)

    - + Alt-Ergo 0 201466971.375394859.1782010201340122 @@ -293,7 +293,7 @@

    QF_UFLRA (Single Query Track)

    - + SMTInterpol 0 3303699.9042760.19333033002110 @@ -302,7 +302,7 @@

    QF_UFLRA (Single Query Track)

    - + Yices 2.6.2 0 3292551.42551.6232932902121 @@ -311,7 +311,7 @@

    QF_UFLRA (Single Query Track)

    - + 2018-Yicesn 0 3292556.2692556.3732932902121 @@ -320,7 +320,7 @@

    QF_UFLRA (Single Query Track)

    - + Z3n 0 3292653.5882653.61932932902121 @@ -329,7 +329,7 @@

    QF_UFLRA (Single Query Track)

    - + veriT 0 3294571.314571.25432932902121 @@ -338,7 +338,7 @@

    QF_UFLRA (Single Query Track)

    - + CVC4 0 3285513.9095514.08432832802132 @@ -347,7 +347,7 @@

    QF_UFLRA (Single Query Track)

    - + Alt-Ergo 0 0402130.284360578.51000541122 @@ -367,7 +367,7 @@

    QF_UFLRA (Single Query Track)

    - + 2018-Yicesn 0 21111.23311.33821102113301 @@ -376,7 +376,7 @@

    QF_UFLRA (Single Query Track)

    - + Yices 2.6.2 0 21111.96812.1621102113301 @@ -385,7 +385,7 @@

    QF_UFLRA (Single Query Track)

    - + Z3n 0 21133.97133.97821102113301 @@ -394,7 +394,7 @@

    QF_UFLRA (Single Query Track)

    - + veriT 0 21142.86142.85421102113301 @@ -403,7 +403,7 @@

    QF_UFLRA (Single Query Track)

    - + CVC4 0 211143.537143.53421102113302 @@ -412,7 +412,7 @@

    QF_UFLRA (Single Query Track)

    - + SMTInterpol 0 211949.314454.21721102113300 @@ -421,7 +421,7 @@

    QF_UFLRA (Single Query Track)

    - + Alt-Ergo 0 20164841.09134280.6692010201340122 @@ -441,7 +441,7 @@

    QF_UFLRA (Single Query Track)

    - + 2018-Yicesn 0 53993.71493.91253932821122 @@ -450,7 +450,7 @@

    QF_UFLRA (Single Query Track)

    - + Yices 2.6.2 0 53996.12396.53153932821122 @@ -459,7 +459,7 @@

    QF_UFLRA (Single Query Track)

    - + veriT 0 538194.373194.31653832721133 @@ -468,7 +468,7 @@

    QF_UFLRA (Single Query Track)

    - + Z3n 0 537206.592206.62553732621144 @@ -477,7 +477,7 @@

    QF_UFLRA (Single Query Track)

    - + CVC4 0 534348.433348.36153432421077 @@ -486,7 +486,7 @@

    QF_UFLRA (Single Query Track)

    - + SMTInterpol 0 5341957.696856.73753432520977 @@ -495,7 +495,7 @@

    QF_UFLRA (Single Query Track)

    - + Alt-Ergo 0 8915298.05811343.19789089452402 @@ -519,7 +519,6 @@

    QF_UFLRA (Single Query Track)

- + -
diff --git a/archive/2019/results/qf-uflra-unsat-core.html b/archive/2019/results/qf-uflra-unsat-core.html
index 37272fe3..08818317 100644
--- a/archive/2019/results/qf-uflra-unsat-core.html
+++ b/archive/2019/results/qf-uflra-unsat-core.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    QF_UFLRA (Unsat Core Track)

    Competition results for the QF_UFLRA - + division - + in the Unsat Core Track.

    @@ -104,8 +104,8 @@

    QF_UFLRA (Unsat Core Track)

    Sequential PerformanceParallel Performance CVC4-ucCVC4-uc - - + + @@ -126,7 +126,7 @@

    QF_UFLRA (Unsat Core Track)

    - + Z3n 0 62 @@ -137,7 +137,7 @@

    QF_UFLRA (Unsat Core Track)

    - + 2018-MathSAT (unsat core)n 0 61 @@ -148,7 +148,7 @@

    QF_UFLRA (Unsat Core Track)

    - + CVC4-uc 0 61 @@ -159,7 +159,7 @@

    QF_UFLRA (Unsat Core Track)

    - + Yices 2.6.2 0 58 @@ -170,7 +170,7 @@

    QF_UFLRA (Unsat Core Track)

    - + SMTInterpol 0 57 @@ -192,7 +192,7 @@

    QF_UFLRA (Unsat Core Track)

    - + Z3n 0 6262.462.4070 @@ -201,7 +201,7 @@

    QF_UFLRA (Unsat Core Track)

    - + 2018-MathSAT (unsat core)n 0 61100.626100.6660 @@ -210,7 +210,7 @@

    QF_UFLRA (Unsat Core Track)

    - + CVC4-uc 0 61211.507211.5560 @@ -219,7 +219,7 @@

    QF_UFLRA (Unsat Core Track)

    - + Yices 2.6.2 0 5820.11120.2860 @@ -228,7 +228,7 @@

    QF_UFLRA (Unsat Core Track)

    - + SMTInterpol 0 57635.201437.8260 @@ -252,7 +252,6 @@

    QF_UFLRA (Unsat Core Track)

- + -
diff --git a/archive/2019/results/qf-ufnia-incremental.html b/archive/2019/results/qf-ufnia-incremental.html
index bfdb515d..349db466 100644
--- a/archive/2019/results/qf-ufnia-incremental.html
+++ b/archive/2019/results/qf-ufnia-incremental.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    QF_UFNIA (Incremental Track)

    Competition results for the QF_UFNIA - + division - + in the Incremental Track.

    @@ -104,8 +104,8 @@

    QF_UFNIA (Incremental Track)

    Parallel Performance MathSAT-default - - + + @@ -124,7 +124,7 @@

    QF_UFNIA (Incremental Track)

    - + MathSAT-default 0 8457339.34734.32400 @@ -133,7 +133,7 @@

    QF_UFNIA (Incremental Track)

    - + MathSAT-na-ext 0 8457340.40935.39400 @@ -142,7 +142,7 @@

    QF_UFNIA (Incremental Track)

    - + Z3n 0 8457354.54450.25800 @@ -151,7 +151,7 @@

    QF_UFNIA (Incremental Track)

    - + 2018-Z3 (incremental)n 0 8457361.19556.70200 @@ -160,7 +160,7 @@

    QF_UFNIA (Incremental Track)

    - + CVC4-inc 0 84573259.433253.01500 @@ -169,7 +169,7 @@

    QF_UFNIA (Incremental Track)

    - + Yices 2.6.2 Incremental 0 392400.02400.0845341 @@ -193,7 +193,6 @@

    QF_UFNIA (Incremental Track)

- + -
diff --git a/archive/2019/results/qf-ufnia-single-query.html b/archive/2019/results/qf-ufnia-single-query.html
index 4c47569d..b5a02dc2 100644
--- a/archive/2019/results/qf-ufnia-single-query.html
+++ b/archive/2019/results/qf-ufnia-single-query.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    QF_UFNIA (Single Query Track)

    Competition results for the QF_UFNIA - + division - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    QF_UFNIA (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) CVC4CVC4CVC4 - - + + CVC4 - - + + CVC4 - + @@ -131,7 +131,7 @@

    QF_UFNIA (Single Query Track)

    - + CVC4 0 285 @@ -142,7 +142,7 @@

    QF_UFNIA (Single Query Track)

    - + MathSAT-default 0 270 @@ -153,7 +153,7 @@

    QF_UFNIA (Single Query Track)

    - + 2018-Yicesn 0 268 @@ -164,7 +164,7 @@

    QF_UFNIA (Single Query Track)

    - + Yices 2.6.2 0 268 @@ -175,7 +175,7 @@

    QF_UFNIA (Single Query Track)

    - + Z3n 0 264 @@ -186,7 +186,7 @@

    QF_UFNIA (Single Query Track)

    - + MathSAT-na-ext 0 259 @@ -197,7 +197,7 @@

    QF_UFNIA (Single Query Track)

    - + Alt-Ergo 0 71 @@ -219,7 +219,7 @@

    QF_UFNIA (Single Query Track)

    - + CVC4 0 2852711.1932711.235285175110151 @@ -228,7 +228,7 @@

    QF_UFNIA (Single Query Track)

    - + MathSAT-default 0 27076067.35576067.462701601103030 @@ -237,7 +237,7 @@

    QF_UFNIA (Single Query Track)

    - + 2018-Yicesn 0 26850707.19950707.63268170983221 @@ -246,7 +246,7 @@

    QF_UFNIA (Single Query Track)

    - + Yices 2.6.2 0 26850769.73650770.291268170983221 @@ -255,7 +255,7 @@

    QF_UFNIA (Single Query Track)

    - + Z3n 0 26459511.79759512.3622641541103622 @@ -264,7 +264,7 @@

    QF_UFNIA (Single Query Track)

    - + MathSAT-na-ext 0 259100277.352100277.6612591491104141 @@ -273,7 +273,7 @@

    QF_UFNIA (Single Query Track)

    - + Alt-Ergo 0 7150.8421.119710712290 @@ -293,7 +293,7 @@

    QF_UFNIA (Single Query Track)

    - + CVC4 0 175300.4300.46517517501251 @@ -302,7 +302,7 @@

    QF_UFNIA (Single Query Track)

    - + 2018-Yicesn 0 1702705.8062706.008170170013021 @@ -311,7 +311,7 @@

    QF_UFNIA (Single Query Track)

    - + Yices 2.6.2 0 1702768.2542768.551170170013021 @@ -320,7 +320,7 @@

    QF_UFNIA (Single Query Track)

    - + MathSAT-default 0 16049646.01149646.106160160014030 @@ -329,7 +329,7 @@

    QF_UFNIA (Single Query Track)

    - + Z3n 0 15444461.54944461.939154154014622 @@ -338,7 +338,7 @@

    QF_UFNIA (Single Query Track)

    - + MathSAT-na-ext 0 14973847.76573848.062149149015141 @@ -347,7 +347,7 @@

    QF_UFNIA (Single Query Track)

    - + Alt-Ergo 0 038.88514.9760003000 @@ -367,7 +367,7 @@

    QF_UFNIA (Single Query Track)

    - + CVC4 0 1105.3375.31411001101901 @@ -376,7 +376,7 @@

    QF_UFNIA (Single Query Track)

    - + Z3n 0 1107724.9867725.136110011019022 @@ -385,7 +385,7 @@

    QF_UFNIA (Single Query Track)

    - + MathSAT-default 0 11016821.34416821.355110011019030 @@ -394,7 +394,7 @@

    QF_UFNIA (Single Query Track)

    - + MathSAT-na-ext 0 11016829.58716829.599110011019041 @@ -403,7 +403,7 @@

    QF_UFNIA (Single Query Track)

    - + 2018-Yicesn 0 9838401.39338401.6229809820221 @@ -412,7 +412,7 @@

    QF_UFNIA (Single Query Track)

    - + Yices 2.6.2 0 9838401.48238401.749809820221 @@ -421,7 +421,7 @@

    QF_UFNIA (Single Query Track)

    - + Alt-Ergo 0 7111.1455.828710712290 @@ -441,7 +441,7 @@

    QF_UFNIA (Single Query Track)

    - + CVC4 0 283120.66120.628283173110173 @@ -450,7 +450,7 @@

    QF_UFNIA (Single Query Track)

    - + 2018-Yicesn 0 266568.423568.848266168983423 @@ -459,7 +459,7 @@

    QF_UFNIA (Single Query Track)

    - + Yices 2.6.2 0 266571.218571.732266168983423 @@ -468,7 +468,7 @@

    QF_UFNIA (Single Query Track)

    - + MathSAT-default 0 266990.553990.5762661561103434 @@ -477,7 +477,7 @@

    QF_UFNIA (Single Query Track)

    - + Z3n 0 2541190.891190.9042541461084645 @@ -486,7 +486,7 @@

    QF_UFNIA (Single Query Track)

    - + MathSAT-na-ext 0 2521266.4191266.4422521421104848 @@ -495,7 +495,7 @@

    QF_UFNIA (Single Query Track)

    - + Alt-Ergo 0 7150.8421.119710712290 @@ -519,7 +519,6 @@

    QF_UFNIA (Single Query Track)

- + -
diff --git a/archive/2019/results/qf-ufnia-unsat-core.html b/archive/2019/results/qf-ufnia-unsat-core.html
index 594aa8c2..1494561b 100644
--- a/archive/2019/results/qf-ufnia-unsat-core.html
+++ b/archive/2019/results/qf-ufnia-unsat-core.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    QF_UFNIA (Unsat Core Track)

    Competition results for the QF_UFNIA - + division - + in the Unsat Core Track.

    @@ -104,8 +104,8 @@

    QF_UFNIA (Unsat Core Track)

    Sequential PerformanceParallel Performance MathSAT-defaultMathSAT-default - - + + @@ -126,7 +126,7 @@

    QF_UFNIA (Unsat Core Track)

    - + 2018-Z3 (unsat core)n 0 903 @@ -137,7 +137,7 @@

    QF_UFNIA (Unsat Core Track)

    - + Z3n 0 902 @@ -148,7 +148,7 @@

    QF_UFNIA (Unsat Core Track)

    - + MathSAT-default 0 864 @@ -159,7 +159,7 @@

    QF_UFNIA (Unsat Core Track)

    - + MathSAT-na-ext 0 858 @@ -170,7 +170,7 @@

    QF_UFNIA (Unsat Core Track)

    - + CVC4-uc 0 820 @@ -192,7 +192,7 @@

    QF_UFNIA (Unsat Core Track)

    - + 2018-Z3 (unsat core)n 0 903518.617518.6580 @@ -201,7 +201,7 @@

    QF_UFNIA (Unsat Core Track)

    - + Z3n 0 902500.019500.0210 @@ -210,7 +210,7 @@

    QF_UFNIA (Unsat Core Track)

    - + MathSAT-default 0 86412014.68612014.7055 @@ -219,7 +219,7 @@

    QF_UFNIA (Unsat Core Track)

    - + MathSAT-na-ext 0 85812419.84112419.9455 @@ -228,7 +228,7 @@

    QF_UFNIA (Unsat Core Track)

    - + CVC4-uc 0 8207.4397.4280 @@ -252,7 +252,6 @@

    QF_UFNIA (Unsat Core Track)

- + -
diff --git a/archive/2019/results/qf-ufnra-single-query.html b/archive/2019/results/qf-ufnra-single-query.html
index 9dc95f74..33d438ca 100644
--- a/archive/2019/results/qf-ufnra-single-query.html
+++ b/archive/2019/results/qf-ufnra-single-query.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    QF_UFNRA (Single Query Track)

    Competition results for the QF_UFNRA - + division - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    QF_UFNRA (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) Par4Par4Par4 - - + + MathSAT-default - - + + Par4 - + @@ -131,7 +131,7 @@

    QF_UFNRA (Single Query Track)

    - + Par4 0 25 @@ -142,7 +142,7 @@

    QF_UFNRA (Single Query Track)

    - + 2018-Yicesn 0 25 @@ -153,7 +153,7 @@

    QF_UFNRA (Single Query Track)

    - + Yices 2.6.2 0 24 @@ -164,7 +164,7 @@

    QF_UFNRA (Single Query Track)

    - + Z3n 0 20 @@ -175,7 +175,7 @@

    QF_UFNRA (Single Query Track)

    - + CVC4 0 12 @@ -186,7 +186,7 @@

    QF_UFNRA (Single Query Track)

    - + MathSAT-default 0 9 @@ -197,7 +197,7 @@

    QF_UFNRA (Single Query Track)

    - + MathSAT-na-ext 0 9 @@ -208,7 +208,7 @@

    QF_UFNRA (Single Query Track)

    - + veriT+raSAT+Redlog 0 0 @@ -219,7 +219,7 @@

    QF_UFNRA (Single Query Track)

    - + Alt-Ergo 0 0 @@ -241,7 +241,7 @@

    QF_UFNRA (Single Query Track)

    - + Par4 0 2610778.8195391.9342624200 @@ -250,7 +250,7 @@

    QF_UFNRA (Single Query Track)

    - + 2018-Yicesn 0 2515890.04815891.0112523211 @@ -259,7 +259,7 @@

    QF_UFNRA (Single Query Track)

    - + Yices 2.6.2 0 2415943.2715944.1632422222 @@ -268,7 +268,7 @@

    QF_UFNRA (Single Query Track)

    - + Z3n 0 20321.653321.6872018260 @@ -277,7 +277,7 @@

    QF_UFNRA (Single Query Track)

    - + CVC4 0 1234870.73734871.071121021414 @@ -286,7 +286,7 @@

    QF_UFNRA (Single Query Track)

    - + MathSAT-default 0 940878.72940878.7339721717 @@ -295,7 +295,7 @@

    QF_UFNRA (Single Query Track)

    - + MathSAT-na-ext 0 941420.54341420.6369721717 @@ -304,7 +304,7 @@

    QF_UFNRA (Single Query Track)

    - + Alt-Ergo 0 062400.062400.00002626 @@ -313,7 +313,7 @@

    QF_UFNRA (Single Query Track)

    - + veriT+raSAT+Redlog 0 062400.062400.00002626 @@ -333,7 +333,7 @@

    QF_UFNRA (Single Query Track)

    - + Par4 0 2410778.8065391.4452424020 @@ -342,7 +342,7 @@

    QF_UFNRA (Single Query Track)

    - + 2018-Yicesn 0 2315887.75715888.7192323031 @@ -351,7 +351,7 @@

    QF_UFNRA (Single Query Track)

    - + Yices 2.6.2 0 2215940.74315941.6362222042 @@ -360,7 +360,7 @@

    QF_UFNRA (Single Query Track)

    - + Z3n 0 18321.34321.3751818080 @@ -369,7 +369,7 @@

    QF_UFNRA (Single Query Track)

    - + CVC4 0 1034861.39234861.726101001614 @@ -378,7 +378,7 @@

    QF_UFNRA (Single Query Track)

    - + MathSAT-default 0 740878.47440878.4787701917 @@ -387,7 +387,7 @@

    QF_UFNRA (Single Query Track)

    - + MathSAT-na-ext 0 741420.28141420.3747701917 @@ -396,7 +396,7 @@

    QF_UFNRA (Single Query Track)

    - + Alt-Ergo 0 057600.057600.00002626 @@ -405,7 +405,7 @@

    QF_UFNRA (Single Query Track)

    - + veriT+raSAT+Redlog 0 057600.057600.00002626 @@ -425,7 +425,7 @@

    QF_UFNRA (Single Query Track)

    - + MathSAT-default 0 20.2550.2552022417 @@ -434,7 +434,7 @@

    QF_UFNRA (Single Query Track)

    - + MathSAT-na-ext 0 20.2610.2612022417 @@ -443,7 +443,7 @@

    QF_UFNRA (Single Query Track)

    - + Z3n 0 20.3120.312202240 @@ -452,7 +452,7 @@

    QF_UFNRA (Single Query Track)

    - + Par4 0 20.0120.489202240 @@ -461,7 +461,7 @@

    QF_UFNRA (Single Query Track)

    - + 2018-Yicesn 0 22.2912.291202241 @@ -470,7 +470,7 @@

    QF_UFNRA (Single Query Track)

    - + Yices 2.6.2 0 22.5272.527202242 @@ -479,7 +479,7 @@

    QF_UFNRA (Single Query Track)

    - + CVC4 0 29.3459.3452022414 @@ -488,7 +488,7 @@

    QF_UFNRA (Single Query Track)

    - + Alt-Ergo 0 04800.04800.00002626 @@ -497,7 +497,7 @@

    QF_UFNRA (Single Query Track)

    - + veriT+raSAT+Redlog 0 04800.04800.00002626 @@ -517,7 +517,7 @@

    QF_UFNRA (Single Query Track)

    - + Z3n 0 19264.867264.8861917273 @@ -526,7 +526,7 @@

    QF_UFNRA (Single Query Track)

    - + Par4 0 18397.909296.6691816288 @@ -535,7 +535,7 @@

    QF_UFNRA (Single Query Track)

    - + MathSAT-default 0 9486.729486.7339721717 @@ -544,7 +544,7 @@

    QF_UFNRA (Single Query Track)

    - + MathSAT-na-ext 0 5554.545554.5535322121 @@ -553,7 +553,7 @@

    QF_UFNRA (Single Query Track)

    - + 2018-Yicesn 0 4555.062555.0664222222 @@ -562,7 +562,7 @@

    QF_UFNRA (Single Query Track)

    - + Yices 2.6.2 0 4557.89557.8934222222 @@ -571,7 +571,7 @@

    QF_UFNRA (Single Query Track)

    - + CVC4 0 2585.345585.3452022424 @@ -580,7 +580,7 @@

    QF_UFNRA (Single Query Track)

    - + Alt-Ergo 0 0624.0624.00002626 @@ -589,7 +589,7 @@

    QF_UFNRA (Single Query Track)

    - + veriT+raSAT+Redlog 0 0624.0624.00002626 @@ -613,7 +613,6 @@

    QF_UFNRA (Single Query Track)

- + -
diff --git a/archive/2019/results/qf-ufnra-unsat-core.html b/archive/2019/results/qf-ufnra-unsat-core.html
index 2af079e0..3c50ff30 100644
--- a/archive/2019/results/qf-ufnra-unsat-core.html
+++ b/archive/2019/results/qf-ufnra-unsat-core.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    QF_UFNRA (Unsat Core Track)

    Competition results for the QF_UFNRA - + division - + in the Unsat Core Track.

    @@ -104,8 +104,8 @@

    QF_UFNRA (Unsat Core Track)

    Sequential PerformanceParallel Performance MathSAT-defaultMathSAT-default - - + + @@ -126,7 +126,7 @@

    QF_UFNRA (Unsat Core Track)

    - + 2018-Z3 (unsat core)n 0 66 @@ -137,7 +137,7 @@

    QF_UFNRA (Unsat Core Track)

    - + Z3n 0 66 @@ -148,7 +148,7 @@

    QF_UFNRA (Unsat Core Track)

    - + MathSAT-default 0 64 @@ -159,7 +159,7 @@

    QF_UFNRA (Unsat Core Track)

    - + MathSAT-na-ext 0 64 @@ -170,7 +170,7 @@

    QF_UFNRA (Unsat Core Track)

    - + CVC4-uc 0 42 @@ -192,7 +192,7 @@

    QF_UFNRA (Unsat Core Track)

    - + 2018-Z3 (unsat core)n 0 660.6750.6750 @@ -201,7 +201,7 @@

    QF_UFNRA (Unsat Core Track)

    - + Z3n 0 660.690.690 @@ -210,7 +210,7 @@

    QF_UFNRA (Unsat Core Track)

    - + MathSAT-default 0 640.4630.4630 @@ -219,7 +219,7 @@

    QF_UFNRA (Unsat Core Track)

    - + MathSAT-na-ext 0 640.4720.4720 @@ -228,7 +228,7 @@

    QF_UFNRA (Unsat Core Track)

    - + CVC4-uc 0 426.9796.9770 @@ -252,7 +252,6 @@

    QF_UFNRA (Unsat Core Track)

- + -
diff --git a/archive/2019/results/results-challenge-incremental.html b/archive/2019/results/results-challenge-incremental.html
index 69eb876f..d02de18d 100644
--- a/archive/2019/results/results-challenge-incremental.html
+++ b/archive/2019/results/results-challenge-incremental.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -94,7 +94,6 @@

    SMT-COMP 2019 Results - Challenge Track (incremental) (Summary)

- + -
diff --git a/archive/2019/results/results-challenge-non-incremental.html b/archive/2019/results/results-challenge-non-incremental.html
index c72ed8f9..7d1d3fac 100644
--- a/archive/2019/results/results-challenge-non-incremental.html
+++ b/archive/2019/results/results-challenge-non-incremental.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -106,7 +106,6 @@

    SMT-COMP 2019 Results - Challenge Track (non-incremental) (Summary)

- + -
diff --git a/archive/2019/results/results-incremental.html b/archive/2019/results/results-incremental.html
index e6243a55..2baad6b2 100644
--- a/archive/2019/results/results-incremental.html
+++ b/archive/2019/results/results-incremental.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -178,7 +178,6 @@

    SMT-COMP 2019 Results - Incremental Track (Summary)

- + -
diff --git a/archive/2019/results/results-model-validation.html b/archive/2019/results/results-model-validation.html
index df6d0030..59a115bc 100644
--- a/archive/2019/results/results-model-validation.html
+++ b/archive/2019/results/results-model-validation.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -89,7 +89,6 @@

    SMT-COMP 2019 Results - Model Validation Track (experimental) (Summary)

- + -
diff --git a/archive/2019/results/results-single-query.html b/archive/2019/results/results-single-query.html
index c17e35fb..3a427fef 100644
--- a/archive/2019/results/results-single-query.html
+++ b/archive/2019/results/results-single-query.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -496,7 +496,6 @@

    SMT-COMP 2019 Results - Single Query Track (Summary)

- + -
diff --git a/archive/2019/results/results-unsat-core.html b/archive/2019/results/results-unsat-core.html
index 8465ea16..73ddc4c8 100644
--- a/archive/2019/results/results-unsat-core.html
+++ b/archive/2019/results/results-unsat-core.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -247,7 +247,6 @@

    SMT-COMP 2019 Results - Unsat Core Track (Summary)

- + -
diff --git a/archive/2019/results/uf-single-query.html b/archive/2019/results/uf-single-query.html
index 018fef53..554acc1b 100644
--- a/archive/2019/results/uf-single-query.html
+++ b/archive/2019/results/uf-single-query.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    UF (Single Query Track)

    Competition results for the UF - + division - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    UF (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) CVC4VampireVampire - - + + Par4 - - + + Vampire - + @@ -131,7 +131,7 @@

    UF (Single Query Track)

    - + CVC4 0 1141 @@ -142,7 +142,7 @@

    UF (Single Query Track)

    - + 2018-CVC4n 0 1130 @@ -153,7 +153,7 @@

    UF (Single Query Track)

    - + Par4 0 1128 @@ -164,7 +164,7 @@

    UF (Single Query Track)

    - + Vampire 0 1120 @@ -175,7 +175,7 @@

    UF (Single Query Track)

    - + 2018-Vampiren 0 1078 @@ -186,7 +186,7 @@

    UF (Single Query Track)

    - + veriT 0 665 @@ -197,7 +197,7 @@

    UF (Single Query Track)

    - + Alt-Ergo 0 640 @@ -208,7 +208,7 @@

    UF (Single Query Track)

    - + Z3n 0 455 @@ -219,7 +219,7 @@

    UF (Single Query Track)

    - + SMTInterpol 0 16 @@ -230,7 +230,7 @@

    UF (Single Query Track)

    - + UltimateEliminator+MathSAT-5.5.4 0 0 @@ -241,7 +241,7 @@

    UF (Single Query Track)

    - + UltimateEliminator+SMTInterpol 0 0 @@ -252,7 +252,7 @@

    UF (Single Query Track)

    - + UltimateEliminator+Yices-2.6.1 0 0 @@ -274,7 +274,7 @@

    UF (Single Query Track)

    - + 2018-Vampiren 0 11716257667.9524099803.018117143473716451645 @@ -283,7 +283,7 @@

    UF (Single Query Track)

    - + Vampire 0 11575071140.7314070434.872115742773016591659 @@ -292,7 +292,7 @@

    UF (Single Query Track)

    - + Par4 0 11424585000.0644207563.01114237077216741406 @@ -301,7 +301,7 @@

    UF (Single Query Track)

    - + CVC4 0 11414430946.3984436373.452114137676516751675 @@ -310,7 +310,7 @@

    UF (Single Query Track)

    - + 2018-CVC4n 0 11304257953.2944260560.957113037175916861686 @@ -319,7 +319,7 @@

    UF (Single Query Track)

    - + veriT 0 6655112293.1495111316.269665066521511851 @@ -328,7 +328,7 @@

    UF (Single Query Track)

    - + Alt-Ergo 0 6645371320.6515088014.59664066421522011 @@ -337,7 +337,7 @@

    UF (Single Query Track)

    - + Z3n 0 4553237499.6093237656.524455414142361861 @@ -346,7 +346,7 @@

    UF (Single Query Track)

    - + SMTInterpol 0 166815660.7456664398.4471641228002744 @@ -355,7 +355,7 @@

    UF (Single Query Track)

    - + UltimateEliminator+MathSAT-5.5.4 0 010021.8747369.0400028160 @@ -364,7 +364,7 @@

    UF (Single Query Track)

    - + UltimateEliminator+SMTInterpol 0 016950.18213252.99100028163 @@ -373,7 +373,7 @@

    UF (Single Query Track)

    - + UltimateEliminator+Yices-2.6.1 0 017216.90713888.84400028163 @@ -393,7 +393,7 @@

    UF (Single Query Track)

    - + 2018-Vampiren 0 434157419.76875400.861434434023821645 @@ -402,7 +402,7 @@

    UF (Single Query Track)

    - + Vampire 0 427180023.99295517.912427427023891659 @@ -411,7 +411,7 @@

    UF (Single Query Track)

    - + CVC4 0 376558407.519563371.256376376024401675 @@ -420,7 +420,7 @@

    UF (Single Query Track)

    - + 2018-CVC4n 0 371393712.229396192.331371371024451686 @@ -429,7 +429,7 @@

    UF (Single Query Track)

    - + Par4 0 370710592.709381552.636370370024461406 @@ -438,7 +438,7 @@

    UF (Single Query Track)

    - + Z3n 0 41584817.156584856.326414102775861 @@ -447,7 +447,7 @@

    UF (Single Query Track)

    - + SMTInterpol 0 41093231.241072619.47544028122744 @@ -456,7 +456,7 @@

    UF (Single Query Track)

    - + UltimateEliminator+MathSAT-5.5.4 0 01608.551077.02400028160 @@ -465,7 +465,7 @@

    UF (Single Query Track)

    - + UltimateEliminator+SMTInterpol 0 08793.9848182.67600028163 @@ -474,7 +474,7 @@

    UF (Single Query Track)

    - + UltimateEliminator+Yices-2.6.1 0 08792.478277.23100028163 @@ -483,7 +483,7 @@

    UF (Single Query Track)

    - + Alt-Ergo 0 01037976.0021006996.02800028162011 @@ -492,7 +492,7 @@

    UF (Single Query Track)

    - + veriT 0 01039068.2181039073.23600028161851 @@ -512,7 +512,7 @@

    UF (Single Query Track)

    - + Par4 0 772212007.356163610.373772077220441406 @@ -521,7 +521,7 @@

    UF (Single Query Track)

    - + CVC4 0 765210138.879210602.197765076520511675 @@ -530,7 +530,7 @@

    UF (Single Query Track)

    - + 2018-CVC4n 0 759201841.065201968.625759075920571686 @@ -539,7 +539,7 @@

    UF (Single Query Track)

    - + 2018-Vampiren 0 737918615.444363787.298737073720791645 @@ -548,7 +548,7 @@

    UF (Single Query Track)

    - + Vampire 0 730595102.038313020.29730073020861659 @@ -557,7 +557,7 @@

    UF (Single Query Track)

    - + veriT 0 665426531.004426519.5665066521511851 @@ -566,7 +566,7 @@

    UF (Single Query Track)

    - + Alt-Ergo 0 664576044.213441301.567664066421522011 @@ -575,7 +575,7 @@

    UF (Single Query Track)

    - + Z3n 0 414611765.708611779.4841404142402861 @@ -584,7 +584,7 @@

    UF (Single Query Track)

    - + SMTInterpol 0 122003103.0871953209.8071201228042744 @@ -593,7 +593,7 @@

    UF (Single Query Track)

    - + UltimateEliminator+SMTInterpol 0 02881.3211784.65600028163 @@ -602,7 +602,7 @@

    UF (Single Query Track)

    - + UltimateEliminator+Yices-2.6.1 0 02976.0031975.20300028163 @@ -611,7 +611,7 @@

    UF (Single Query Track)

    - + UltimateEliminator+MathSAT-5.5.4 0 02987.912670.38500028160 @@ -631,7 +631,7 @@

    UF (Single Query Track)

    - + 2018-Vampiren 0 84754045.68149072.35984739245519691969 @@ -640,7 +640,7 @@

    UF (Single Query Track)

    - + Vampire 0 77458178.32251417.48577426351120422042 @@ -649,7 +649,7 @@

    UF (Single Query Track)

    - + Par4 0 71852111.0451030.7317184767120981830 @@ -658,7 +658,7 @@

    UF (Single Query Track)

    - + CVC4 0 60653596.51953582.489606759922102210 @@ -667,7 +667,7 @@

    UF (Single Query Track)

    - + 2018-CVC4n 0 60653619.46353616.958606759922102210 @@ -676,7 +676,7 @@

    UF (Single Query Track)

    - + veriT 0 60353868.68853853.304603060322132033 @@ -685,7 +685,7 @@

    UF (Single Query Track)

    - + Alt-Ergo 0 56357194.14254238.07563056322532122 @@ -694,7 +694,7 @@

    UF (Single Query Track)

    - + Z3n 0 41258138.26758114.7584123837424042332 @@ -703,7 +703,7 @@

    UF (Single Query Track)

    - + SMTInterpol 0 1067264.71967221.542104628062799 @@ -712,7 +712,7 @@

    UF (Single Query Track)

    - + UltimateEliminator+SMTInterpol 0 09822.1826124.99100028163 @@ -721,7 +721,7 @@

    UF (Single Query Track)

    - + UltimateEliminator+MathSAT-5.5.4 0 010041.5656703.20600028161 @@ -730,7 +730,7 @@

    UF (Single Query Track)

    - + UltimateEliminator+Yices-2.6.1 0 010088.9076760.84400028163 @@ -754,7 +754,6 @@

    UF (Single Query Track)

- + -
diff --git a/archive/2019/results/uf-unsat-core.html b/archive/2019/results/uf-unsat-core.html
index 3e62b0f7..d12f0a4b 100644
--- a/archive/2019/results/uf-unsat-core.html
+++ b/archive/2019/results/uf-unsat-core.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    UF (Unsat Core Track)

    Competition results for the UF - + division - + in the Unsat Core Track.

    @@ -104,8 +104,8 @@

    UF (Unsat Core Track)

    Sequential PerformanceParallel Performance CVC4-ucCVC4-uc - - + + @@ -126,7 +126,7 @@

    UF (Unsat Core Track)

    - + 2018-CVC4 (unsat core)n 0 476991 @@ -137,7 +137,7 @@

    UF (Unsat Core Track)

    - + CVC4-uc 0 476907 @@ -148,7 +148,7 @@

    UF (Unsat Core Track)

    - + Z3n 0 407258 @@ -159,7 +159,7 @@

    UF (Unsat Core Track)

    - + UltimateEliminator+SMTInterpol 0 0 @@ -170,7 +170,7 @@

    UF (Unsat Core Track)

    - + UltimateEliminator+Yices-2.6.1 0 0 @@ -181,7 +181,7 @@

    UF (Unsat Core Track)

    - + UltimateEliminator+MathSAT-5.5.4 0 0 @@ -203,7 +203,7 @@

    UF (Unsat Core Track)

    - + 2018-CVC4 (unsat core)n 0 476991369287.168369288.605148 @@ -212,7 +212,7 @@

    UF (Unsat Core Track)

    - + CVC4-uc 0 476907371216.626371214.902149 @@ -221,7 +221,7 @@

    UF (Unsat Core Track)

    - + Z3n 0 407258489822.624489864.084129 @@ -230,7 +230,7 @@

    UF (Unsat Core Track)

    - + UltimateEliminator+SMTInterpol 0 05910.2423673.7810 @@ -239,7 +239,7 @@

    UF (Unsat Core Track)

    - + UltimateEliminator+MathSAT-5.5.4 0 06075.1074054.5410 @@ -248,7 +248,7 @@

    UF (Unsat Core Track)

    - + UltimateEliminator+Yices-2.6.1 0 06071.5224084.5530 @@ -272,7 +272,6 @@

    UF (Unsat Core Track)

- + -
diff --git a/archive/2019/results/ufbv-single-query.html b/archive/2019/results/ufbv-single-query.html
index 72194ade..fc57c0f4 100644
--- a/archive/2019/results/ufbv-single-query.html
+++ b/archive/2019/results/ufbv-single-query.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    UFBV (Single Query Track)

    Competition results for the UFBV - + division - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    UFBV (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) Par4Par4Par4 - - + + Par4 - - + + Par4 - + @@ -131,7 +131,7 @@

    UFBV (Single Query Track)

    - + 2018-Z3n 0 51 @@ -142,7 +142,7 @@

    UFBV (Single Query Track)

    - + Par4 0 51 @@ -153,7 +153,7 @@

    UFBV (Single Query Track)

    - + Z3n 0 49 @@ -164,7 +164,7 @@

    UFBV (Single Query Track)

    - + CVC4 0 28 @@ -175,7 +175,7 @@

    UFBV (Single Query Track)

    - + UltimateEliminator+MathSAT-5.5.4 0 6 @@ -197,7 +197,7 @@

    UFBV (Single Query Track)

    - + Par4 0 5150462.30450436.8255118332121 @@ -206,7 +206,7 @@

    UFBV (Single Query Track)

    - + 2018-Z3n 0 5150437.57750437.5845118332121 @@ -215,7 +215,7 @@

    UFBV (Single Query Track)

    - + Z3n 0 4955234.20255234.2064918312323 @@ -224,7 +224,7 @@

    UFBV (Single Query Track)

    - + CVC4 0 2884631.48884649.986280284434 @@ -233,7 +233,7 @@

    UFBV (Single Query Track)

    - + UltimateEliminator+MathSAT-5.5.4 0 68922.5918777.058606663 @@ -253,7 +253,7 @@

    UFBV (Single Query Track)

    - + Par4 0 181.2122.77181805421 @@ -262,7 +262,7 @@

    UFBV (Single Query Track)

    - + 2018-Z3n 0 183.0133.014181805421 @@ -271,7 +271,7 @@

    UFBV (Single Query Track)

    - + Z3n 0 183.0383.041181805423 @@ -280,7 +280,7 @@

    UFBV (Single Query Track)

    - + UltimateEliminator+MathSAT-5.5.4 0 01256.421199.581000723 @@ -289,7 +289,7 @@

    UFBV (Single Query Track)

    - + CVC4 0 021602.30821602.3020007234 @@ -309,7 +309,7 @@

    UFBV (Single Query Track)

    - + Par4 0 3361.09234.055330333921 @@ -318,7 +318,7 @@

    UFBV (Single Query Track)

    - + 2018-Z3n 0 3334.56434.57330333921 @@ -327,7 +327,7 @@

    UFBV (Single Query Track)

    - + Z3n 0 314831.1644831.165310314123 @@ -336,7 +336,7 @@

    UFBV (Single Query Track)

    - + CVC4 0 2812629.17912647.683280284434 @@ -345,7 +345,7 @@

    UFBV (Single Query Track)

    - + UltimateEliminator+MathSAT-5.5.4 0 6115.39680.93606663 @@ -365,7 +365,7 @@

    UFBV (Single Query Track)

    - + Par4 0 51566.304540.8255118332121 @@ -374,7 +374,7 @@

    UFBV (Single Query Track)

    - + 2018-Z3n 0 51541.577541.5845118332121 @@ -383,7 +383,7 @@

    UFBV (Single Query Track)

    - + Z3n 0 49586.202586.2064918312323 @@ -392,7 +392,7 @@

    UFBV (Single Query Track)

    - + CVC4 0 131281.2071281.209130135949 @@ -401,7 +401,7 @@

    UFBV (Single Query Track)

    - + UltimateEliminator+MathSAT-5.5.4 0 6441.096334.576606666 @@ -425,7 +425,6 @@

    UFBV (Single Query Track)

- + -
diff --git a/archive/2019/results/ufdt-single-query.html b/archive/2019/results/ufdt-single-query.html
index 8483403c..b4a41e82 100644
--- a/archive/2019/results/ufdt-single-query.html
+++ b/archive/2019/results/ufdt-single-query.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    UFDT (Single Query Track)

    Competition results for the UFDT - + division - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    UFDT (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) CVC4CVC4CVC4 - - + + CVC4 - - + + Alt-Ergo - + @@ -131,7 +131,7 @@

    UFDT (Single Query Track)

    - + CVC4 0 531 @@ -142,7 +142,7 @@

    UFDT (Single Query Track)

    - + 2018-CVC4n 0 524 @@ -153,7 +153,7 @@

    UFDT (Single Query Track)

    - + Vampire 0 415 @@ -164,7 +164,7 @@

    UFDT (Single Query Track)

    - + Alt-Ergo 0 358 @@ -186,7 +186,7 @@

    UFDT (Single Query Track)

    - + CVC4 0 5312569194.5112572700.61753111941210161016 @@ -195,7 +195,7 @@

    UFDT (Single Query Track)

    - + 2018-CVC4n 0 5242522848.2112524926.06752411740710231023 @@ -204,7 +204,7 @@

    UFDT (Single Query Track)

    - + Vampire 0 4343060141.5672718249.8074345837611131113 @@ -213,7 +213,7 @@

    UFDT (Single Query Track)

    - + Alt-Ergo 0 3722876046.162652558.061372037211751046 @@ -233,7 +233,7 @@

    UFDT (Single Query Track)

    - + CVC4 0 119187244.669190394.12119119014281016 @@ -242,7 +242,7 @@

    UFDT (Single Query Track)

    - + 2018-CVC4n 0 117140962.56142742.238117117014301023 @@ -251,7 +251,7 @@

    UFDT (Single Query Track)

    - + Vampire 0 58296449.028245203.0675858014891113 @@ -260,7 +260,7 @@

    UFDT (Single Query Track)

    - + Alt-Ergo 0 0250175.238231943.87400015471046 @@ -280,7 +280,7 @@

    UFDT (Single Query Track)

    - + CVC4 0 412113949.842114306.496412041211351016 @@ -289,7 +289,7 @@

    UFDT (Single Query Track)

    - + 2018-CVC4n 0 407113885.651114183.83407040711401023 @@ -298,7 +298,7 @@

    UFDT (Single Query Track)

    - + Vampire 0 376330089.529205198.92376037611711113 @@ -307,7 +307,7 @@

    UFDT (Single Query Track)

    - + Alt-Ergo 0 372280927.181199851.698372037211751046 @@ -327,7 +327,7 @@

    UFDT (Single Query Track)

    - + Alt-Ergo 0 33729045.78527809.545337033712101090 @@ -336,7 +336,7 @@

    UFDT (Single Query Track)

    - + 2018-CVC4n 0 32729573.6529573.583327332412201220 @@ -345,7 +345,7 @@

    UFDT (Single Query Track)

    - + CVC4 0 32629551.07429550.992326332312211221 @@ -354,7 +354,7 @@

    UFDT (Single Query Track)

    - + Vampire 0 27735508.94431772.901277027712701270 @@ -378,7 +378,6 @@

    UFDT (Single Query Track)

- + -
diff --git a/archive/2019/results/ufdtlia-single-query.html b/archive/2019/results/ufdtlia-single-query.html
index f1fa5e1a..46127c57 100644
--- a/archive/2019/results/ufdtlia-single-query.html
+++ b/archive/2019/results/ufdtlia-single-query.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    UFDTLIA (Single Query Track)

    Competition results for the UFDTLIA - + division - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    UFDTLIA (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) CVC4CVC4— - - + + CVC4 - - + + Vampire - + @@ -131,7 +131,7 @@

    UFDTLIA (Single Query Track)

    - + CVC4 0 218 @@ -142,7 +142,7 @@

    UFDTLIA (Single Query Track)

    - + 2018-CVC4n 0 216 @@ -153,7 +153,7 @@

    UFDTLIA (Single Query Track)

    - + Vampire 0 69 @@ -164,7 +164,7 @@

    UFDTLIA (Single Query Track)

    - + Alt-Ergo 0 55 @@ -186,7 +186,7 @@

    UFDTLIA (Single Query Track)

    - + CVC4 0 218299670.46301267.23221802188181 @@ -195,7 +195,7 @@

    UFDTLIA (Single Query Track)

    - + 2018-CVC4n 0 216251739.82253038.45521602168380 @@ -204,7 +204,7 @@

    UFDTLIA (Single Query Track)

    - + Vampire 0 69566345.341553786.00969069230230 @@ -213,7 +213,7 @@

    UFDTLIA (Single Query Track)

    - + Alt-Ergo 0 58599427.171581788.35858058241238 @@ -233,7 +233,7 @@

    UFDTLIA (Single Query Track)

    - + 2018-CVC4n 0 00.00.000029980 @@ -242,7 +242,7 @@

    UFDTLIA (Single Query Track)

    - + Alt-Ergo 0 00.00.0000299238 @@ -251,7 +251,7 @@

    UFDTLIA (Single Query Track)

    - + CVC4 0 00.00.000029981 @@ -260,7 +260,7 @@

    UFDTLIA (Single Query Track)

    - + Vampire 0 00.00.0000299230 @@ -280,7 +280,7 @@

    UFDTLIA (Single Query Track)

    - + CVC4 0 218107670.46109267.23221802188181 @@ -289,7 +289,7 @@

    UFDTLIA (Single Query Track)

    - + 2018-CVC4n 0 21661787.3362936.10521602168380 @@ -298,7 +298,7 @@

    UFDTLIA (Single Query Track)

    - + Vampire 0 69374345.341361786.00969069230230 @@ -307,7 +307,7 @@

    UFDTLIA (Single Query Track)

    - + Alt-Ergo 0 58407427.171389788.35858058241238 @@ -327,7 +327,7 @@

    UFDTLIA (Single Query Track)

    - + Vampire 0 586178.4115891.81358058241241 @@ -336,7 +336,7 @@

    UFDTLIA (Single Query Track)

    - + Alt-Ergo 0 536024.5515937.38753053246243 @@ -345,7 +345,7 @@

    UFDTLIA (Single Query Track)

    - + CVC4 0 486052.2636052.24648048251251 @@ -354,7 +354,7 @@

    UFDTLIA (Single Query Track)

    - + 2018-CVC4n 0 466100.3366097.71146046253253 @@ -378,7 +378,6 @@

    UFDTLIA (Single Query Track)

- + -
diff --git a/archive/2019/results/ufdtnia-single-query.html b/archive/2019/results/ufdtnia-single-query.html
index b3d93122..5aa1812b 100644
--- a/archive/2019/results/ufdtnia-single-query.html
+++ b/archive/2019/results/ufdtnia-single-query.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    UFDTNIA (Single Query Track)

    Competition results for the UFDTNIA - + division - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    UFDTNIA (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) VampireVampire— - - + + Vampire - - + + Vampire - + @@ -131,7 +131,7 @@

    UFDTNIA (Single Query Track)

    - + Vampire 0 1 @@ -142,7 +142,7 @@

    UFDTNIA (Single Query Track)

    - + CVC4 0 1 @@ -153,7 +153,7 @@

    UFDTNIA (Single Query Track)

    - + Alt-Ergo 0 0 @@ -175,7 +175,7 @@

    UFDTNIA (Single Query Track)

    - + Vampire 0 10.1330.25510100 @@ -184,7 +184,7 @@

    UFDTNIA (Single Query Track)

    - + CVC4 0 15.6535.65310100 @@ -193,7 +193,7 @@

    UFDTNIA (Single Query Track)

    - + Alt-Ergo 0 17818.711970.9710100 @@ -213,7 +213,7 @@

    UFDTNIA (Single Query Track)

    - + Alt-Ergo 0 00.00.000010 @@ -222,7 +222,7 @@

    UFDTNIA (Single Query Track)

    - + CVC4 0 00.00.000010 @@ -231,7 +231,7 @@

    UFDTNIA (Single Query Track)

    - + Vampire 0 00.00.000010 @@ -251,7 +251,7 @@

    UFDTNIA (Single Query Track)

    - + Vampire 0 10.1330.25510100 @@ -260,7 +260,7 @@

    UFDTNIA (Single Query Track)

    - + CVC4 0 15.6535.65310100 @@ -269,7 +269,7 @@

    UFDTNIA (Single Query Track)

    - + Alt-Ergo 0 17818.711970.9710100 @@ -289,7 +289,7 @@

    UFDTNIA (Single Query Track)

    - + Vampire 0 10.1330.25510100 @@ -298,7 +298,7 @@

    UFDTNIA (Single Query Track)

    - + CVC4 0 15.6535.65310100 @@ -307,7 +307,7 @@

    UFDTNIA (Single Query Track)

    - + Alt-Ergo 0 024.024.000011 @@ -331,7 +331,6 @@

    UFDTNIA (Single Query Track)

- + -
diff --git a/archive/2019/results/ufidl-single-query.html b/archive/2019/results/ufidl-single-query.html
index a0ec0aa6..bd959d4e 100644
--- a/archive/2019/results/ufidl-single-query.html
+++ b/archive/2019/results/ufidl-single-query.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2019 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    UFIDL (Single Query Track)

    Competition results for the UFIDL - + division - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    UFIDL (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) Par4Par4Par4 - - + + Par4 - - + + Par4 - + @@ -131,7 +131,7 @@

    UFIDL (Single Query Track)

    - + Par4 0 12 @@ -142,7 +142,7 @@

    UFIDL (Single Query Track)

    - + Z3n 0 11 @@ -153,7 +153,7 @@

    UFIDL (Single Query Track)

    - + 2018-Z3n 0 11 @@ -164,7 +164,7 @@

    UFIDL (Single Query Track)

    - + CVC4 0 10 @@ -175,7 +175,7 @@

    UFIDL (Single Query Track)

    - + veriT 0 7 @@ -186,7 +186,7 @@

    UFIDL (Single Query Track)

    - + SMTInterpol 0 7 @@ -197,7 +197,7 @@

    UFIDL (Single Query Track)

    - + Vampire 0 7 @@ -208,7 +208,7 @@

    UFIDL (Single Query Track)

    - + UltimateEliminator+Yices-2.6.1 0 0 @@ -219,7 +219,7 @@

    UFIDL (Single Query Track)

    - + UltimateEliminator+MathSAT-5.5.4 0 0 @@ -230,7 +230,7 @@

    UFIDL (Single Query Track)

    - + UltimateEliminator+SMTInterpol 0 0 @@ -252,7 +252,7 @@

    UFIDL (Single Query Track)

    - + Par4 0 1220236.02919748.761123988 @@ -261,7 +261,7 @@

    UFIDL (Single Query Track)

    - + Z3n 0 1115692.13915692.639112996 @@ -270,7 +270,7 @@

    UFIDL (Single Query Track)

    - + 2018-Z3n 0 1116072.46216072.907112996 @@ -279,7 +279,7 @@

    UFIDL (Single Query Track)

[formatting hunks continue through the remaining "UFIDL (Single Query Track)" result tables: rows for Par4, CVC4, veriT, Vampire, SMTInterpol, Z3n, 2018-Z3n, and the UltimateEliminator+SMTInterpol, +Yices-2.6.1, and +MathSAT-5.5.4 entries across the remaining scoring sections; the final hunk also drops a trailing line at end of file]

diff --git a/archive/2019/results/ufidl-unsat-core.html b/archive/2019/results/ufidl-unsat-core.html
index 65ae2a28..1ea4b4df 100644
--- a/archive/2019/results/ufidl-unsat-core.html
+++ b/archive/2019/results/ufidl-unsat-core.html

[formatting hunks in the page banner, navigation, and "UFIDL (Unsat Core Track)" result tables; CVC4-uc is the sequential and parallel performance winner, and the listing reads 2018-CVC4 (unsat core) 1914, CVC4-uc 1914, Z3n 1913, with the UltimateEliminator entries at 0; the final hunk drops a trailing line at end of file]

diff --git a/archive/2019/results/uflia-single-query.html b/archive/2019/results/uflia-single-query.html
index c2ade9bb..b1c2f26a 100644
--- a/archive/2019/results/uflia-single-query.html
+++ b/archive/2019/results/uflia-single-query.html

[formatting hunks in the page banner, navigation, and "UFLIA (Single Query Track)" result tables; Par4 wins every performance category, and the main listing reads Par4 1674, CVC4 1644, 2018-CVC4 1637, veriT 1545, Vampire 1423, Z3n 1405, Alt-Ergo 1356, SMTInterpol 47, with the UltimateEliminator entries at 0; the final hunk drops a trailing line at end of file]

diff --git a/archive/2019/results/uflia-unsat-core.html b/archive/2019/results/uflia-unsat-core.html
index 010b5bae..5d1e9ae8 100644
--- a/archive/2019/results/uflia-unsat-core.html
+++ b/archive/2019/results/uflia-unsat-core.html

[formatting hunks in the page banner, navigation, and "UFLIA (Unsat Core Track)" result tables; CVC4-uc wins both performance categories, and the listing reads 2018-CVC4 (unsat core) 931101, CVC4-uc 929656, Z3n 884418, with the UltimateEliminator entries at 0; the final hunk drops a trailing line at end of file]

diff --git a/archive/2019/results/uflra-incremental.html b/archive/2019/results/uflra-incremental.html
index 53232ea7..c88eb29b 100644
--- a/archive/2019/results/uflra-incremental.html
+++ b/archive/2019/results/uflra-incremental.html

[formatting hunks in the page banner, navigation, and "UFLRA (Incremental Track)" result table; CVC4-inc wins parallel performance, with rows for Z3n, CVC4-inc, SMTInterpol, 2018-Z3 (incremental), and the UltimateEliminator entries; the final hunk drops a trailing line at end of file]

diff --git a/archive/2019/results/uflra-single-query.html b/archive/2019/results/uflra-single-query.html
index 98f08558..39c5f9ac 100644
--- a/archive/2019/results/uflra-single-query.html
+++ b/archive/2019/results/uflra-single-query.html

[formatting hunks in the page banner, navigation, and "UFLRA (Single Query Track)" result tables; Alt-Ergo wins the sequential, parallel, and 24-second categories and veriT the UNSAT category, and the main listing reads 2018-Z3n 5, Z3n 4, Alt-Ergo 2, veriT 2, Vampire 2, CVC4 2, SMTInterpol 1, with the UltimateEliminator entries at 0; the final hunk drops a trailing line at end of file]

diff --git a/archive/2019/results/uflra-unsat-core.html b/archive/2019/results/uflra-unsat-core.html
index e6026e13..fa02b2df 100644
--- a/archive/2019/results/uflra-unsat-core.html
+++ b/archive/2019/results/uflra-unsat-core.html

[formatting hunks in the page banner, navigation, and "UFLRA (Unsat Core Track)" result tables; CVC4-uc wins both performance categories, and the listing reads 2018-Z3 (unsat core) 16, Z3n 16, CVC4-uc 13, with the UltimateEliminator entries at 0; the final hunk drops a trailing line at end of file]

diff --git a/archive/2019/results/ufnia-single-query.html b/archive/2019/results/ufnia-single-query.html
index 44affe98..9242192f 100644
--- a/archive/2019/results/ufnia-single-query.html
+++ b/archive/2019/results/ufnia-single-query.html

[formatting hunks in the page banner, navigation, and "UFNIA (Single Query Track)" result tables; Par4 wins every performance category, and the main listing reads Par4 3506, CVC4-SymBreak 3325, CVC4 3316, Z3n 2496, 2018-Z3 2462, Vampire 2352, 2018-Vampire 2137, Alt-Ergo 1001, UltimateEliminator+MathSAT-5.5.4 20, UltimateEliminator+Yices-2.6.1 0; the final hunk drops a trailing line at end of file]

diff --git a/archive/2019/results/ufnia-unsat-core.html b/archive/2019/results/ufnia-unsat-core.html
index 058d6d4b..e72799a5 100644
--- a/archive/2019/results/ufnia-unsat-core.html
+++ b/archive/2019/results/ufnia-unsat-core.html

[formatting hunks in the page banner, navigation, and "UFNIA (Unsat Core Track)" result tables; CVC4-uc wins both performance categories, and the listing reads 2018-CVC4 (unsat core) 99835, CVC4-uc 99374, Z3n 74256, with the UltimateEliminator entries at 0; the final hunk drops a trailing line at end of file]

diff --git a/archive/2019/specs.html b/archive/2019/specs.html
index 09fefddc..08713728 100644
--- a/archive/2019/specs.html
+++ b/archive/2019/specs.html

[formatting hunks in the page banner, navigation, and "Machine Specifications" section; the final hunk drops a trailing line at end of file]

diff --git a/archive/2019/tools.html b/archive/2019/tools.html
index b243bfe1..77c947bd 100644
--- a/archive/2019/tools.html
+++ b/archive/2019/tools.html

[formatting hunks in the page banner, navigation, and the "Tools" listings. The page links the GitHub repository, sources, and SMT-COMP 2019 release binaries for each tool: the Pre-Processor (Benchmark Scrambler), on StarExec as the Non-Incremental Scrambler (id 551, Single Query Track and non-incremental Challenge Track), Incremental Scrambler (id 595, Incremental Track and incremental Challenge Track), Unsat-Core Scrambler (id 589), and Model-Validation Scrambler (id 554); the Post-Processor, on StarExec as Non-Incremental (id 555), Incremental (id 556), Unsat-Core (id 594), and Model-Validation (id 587); and the Trace executor, with all wrapped solvers linked from the page. The final hunk drops a trailing line at end of file.]

diff --git a/archive/2020/benchmarks.html b/archive/2020/benchmarks.html
index 5f78e6c4..17693dd5 100644
--- a/archive/2020/benchmarks.html
+++ b/archive/2020/benchmarks.html

[formatting hunks in the page banner, navigation, and "Benchmarks" section of the 2020 pages; the final hunk drops a trailing line at end of file]

diff --git a/archive/2020/disagreements/fp-single-query.html b/archive/2020/disagreements/fp-single-query.html
index 2bfa8ecc..75a260ad 100644
--- a/archive/2020/disagreements/fp-single-query.html
+++ b/archive/2020/disagreements/fp-single-query.html

[formatting hunks in the page banner, navigation, and the seven "Solver Disagreements on FP (Single Query Track)" entries, each recording 2019-Z3n answering unsat on a disputed benchmark; the final hunk drops a trailing line at end of file]

diff --git a/archive/2020/disagreements/lia-single-query.html b/archive/2020/disagreements/lia-single-query.html
index 5763cbdc..d8dcd273 100644
--- a/archive/2020/disagreements/lia-single-query.html
+++ b/archive/2020/disagreements/lia-single-query.html

[formatting hunks in the page banner, navigation, and the single "Solver Disagreements on LIA (Single Query Track)" entry, which records z3n answering sat on the disputed benchmark; the final hunk drops a trailing line at end of file]

diff --git a/archive/2020/disagreements/qf-abvfplra-single-query.html b/archive/2020/disagreements/qf-abvfplra-single-query.html
index 5a90a9ef..58affb3f 100644
--- a/archive/2020/disagreements/qf-abvfplra-single-query.html
+++ b/archive/2020/disagreements/qf-abvfplra-single-query.html

[formatting hunks in the page banner, navigation, and the three "Solver Disagreements on QF_ABVFPLRA (Single Query Track)" entries, each recording CVC4 answering unsat on a disputed benchmark; the final hunk drops a trailing line at end of file]

diff --git a/archive/2020/disagreements/qf-bvfp-incremental.html b/archive/2020/disagreements/qf-bvfp-incremental.html
index 42a23dc5..7ee67f47 100644
--- a/archive/2020/disagreements/qf-bvfp-incremental.html
+++ b/archive/2020/disagreements/qf-bvfp-incremental.html

[formatting hunks in the page banner, navigation, and the single "Solver Disagreements on QF_BVFP (Incremental Track)" entry, which records MathSAT5n answering unsat on the disputed benchmark; the final hunk drops a trailing line at end of file]

diff --git a/archive/2020/disagreements/qf-s-single-query.html b/archive/2020/disagreements/qf-s-single-query.html
index 329b3869..86a9ef31 100644
--- a/archive/2020/disagreements/qf-s-single-query.html
+++ b/archive/2020/disagreements/qf-s-single-query.html

[formatting hunks in the page banner, navigation, and the two "Solver Disagreements on QF_S (Single Query Track)" entries, each recording CVC4 answering sat on a disputed benchmark; the final hunk drops a trailing line at end of file]

diff --git a/archive/2020/disagreements/qf-slia-single-query.html b/archive/2020/disagreements/qf-slia-single-query.html
index ad0f7948..4e730444 100644
--- a/archive/2020/disagreements/qf-slia-single-query.html
+++ b/archive/2020/disagreements/qf-slia-single-query.html

[formatting hunks in the page banner, navigation, and the 56 "Solver Disagreements on QF_SLIA (Single Query Track)" entries, each recording CVC4 answering unsat on a disputed benchmark; the final hunk drops a trailing line at end of file]

diff --git a/archive/2020/disagreements/uf-incremental.html b/archive/2020/disagreements/uf-incremental.html
index 6826516e..46bd835a 100644
--- a/archive/2020/disagreements/uf-incremental.html
+++ b/archive/2020/disagreements/uf-incremental.html

[formatting hunks in the page banner, navigation, and the "Solver Disagreements on UF (Incremental Track)" entries, close to a hundred in all, recording SMTInterpol's answer on each disputed benchmark (sat on all but six, which record unsat); the final hunk drops a trailing line at end of file]

diff --git a/archive/2020/disagreements/ufnia-single-query.html b/archive/2020/disagreements/ufnia-single-query.html
index 3fd43c93..479af547 100644
--- a/archive/2020/disagreements/ufnia-single-query.html
+++ b/archive/2020/disagreements/ufnia-single-query.html

[formatting hunks in the page banner, navigation, and the 46 "Solver Disagreements on UFNIA (Single Query Track)" entries, each recording 2019-Par4n answering sat on a disputed benchmark; the final hunk drops a trailing line at end of file]

diff --git a/archive/2020/divisions/abv.html b/archive/2020/divisions/abv.html
index fbf1057e..88c74062 100644
--- a/archive/2020/divisions/abv.html
+++ b/archive/2020/divisions/abv.html

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/abvfp.html b/archive/2020/divisions/abvfp.html index 75ad4904..16d0e8b2 100644 --- a/archive/2020/divisions/abvfp.html +++ b/archive/2020/divisions/abvfp.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/abvfplra.html b/archive/2020/divisions/abvfplra.html index 62641778..44e3909f 100644 --- a/archive/2020/divisions/abvfplra.html +++ b/archive/2020/divisions/abvfplra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/alia.html b/archive/2020/divisions/alia.html index 530bc037..922fb284 100644 --- a/archive/2020/divisions/alia.html +++ b/archive/2020/divisions/alia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/ania.html b/archive/2020/divisions/ania.html index db02da66..e8bc2a58 100644 --- a/archive/2020/divisions/ania.html +++ b/archive/2020/divisions/ania.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/aufbvdtlia.html b/archive/2020/divisions/aufbvdtlia.html index ad1612a9..f1b2d803 100644 --- a/archive/2020/divisions/aufbvdtlia.html +++ b/archive/2020/divisions/aufbvdtlia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/aufbvdtnia.html b/archive/2020/divisions/aufbvdtnia.html index 2ab7f269..d0c25fbe 100644 --- a/archive/2020/divisions/aufbvdtnia.html +++ b/archive/2020/divisions/aufbvdtnia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -96,7 +96,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/aufdtlia.html b/archive/2020/divisions/aufdtlia.html index 5863f33a..95fa6de5 100644 --- a/archive/2020/divisions/aufdtlia.html +++ b/archive/2020/divisions/aufdtlia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/aufdtlira.html b/archive/2020/divisions/aufdtlira.html index d50c66e8..b65b1f47 100644 --- a/archive/2020/divisions/aufdtlira.html +++ b/archive/2020/divisions/aufdtlira.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/aufdtnira.html b/archive/2020/divisions/aufdtnira.html index 35e90cfc..db7bf726 100644 --- a/archive/2020/divisions/aufdtnira.html +++ b/archive/2020/divisions/aufdtnira.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/auffpdtlira.html b/archive/2020/divisions/auffpdtlira.html index dc6bb57a..f76fa547 100644 --- a/archive/2020/divisions/auffpdtlira.html +++ b/archive/2020/divisions/auffpdtlira.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/auffpdtnira.html b/archive/2020/divisions/auffpdtnira.html index f9cec3d5..3273b961 100644 --- a/archive/2020/divisions/auffpdtnira.html +++ b/archive/2020/divisions/auffpdtnira.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -96,7 +96,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/auflia.html b/archive/2020/divisions/auflia.html index ad0fc0f0..3a12ef73 100644 --- a/archive/2020/divisions/auflia.html +++ b/archive/2020/divisions/auflia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/auflira.html b/archive/2020/divisions/auflira.html index d626cb24..8a7011dd 100644 --- a/archive/2020/divisions/auflira.html +++ b/archive/2020/divisions/auflira.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/aufnia.html b/archive/2020/divisions/aufnia.html index 43a204f5..21c4f9ca 100644 --- a/archive/2020/divisions/aufnia.html +++ b/archive/2020/divisions/aufnia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/aufnira.html b/archive/2020/divisions/aufnira.html index 3ef5e277..8277d8c7 100644 --- a/archive/2020/divisions/aufnira.html +++ b/archive/2020/divisions/aufnira.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/bv.html b/archive/2020/divisions/bv.html index 74fbed0b..14eeec9e 100644 --- a/archive/2020/divisions/bv.html +++ b/archive/2020/divisions/bv.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/bvfp.html b/archive/2020/divisions/bvfp.html index 4a4e9422..ef89d6f4 100644 --- a/archive/2020/divisions/bvfp.html +++ b/archive/2020/divisions/bvfp.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/bvfplra.html b/archive/2020/divisions/bvfplra.html index 81e87c07..c4809304 100644 --- a/archive/2020/divisions/bvfplra.html +++ b/archive/2020/divisions/bvfplra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/fp.html b/archive/2020/divisions/fp.html index 92d070e4..b83aab53 100644 --- a/archive/2020/divisions/fp.html +++ b/archive/2020/divisions/fp.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/fplra.html b/archive/2020/divisions/fplra.html index 3ed8a1b6..7669e0b7 100644 --- a/archive/2020/divisions/fplra.html +++ b/archive/2020/divisions/fplra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/lia.html b/archive/2020/divisions/lia.html index 4a727010..0d7e9117 100644 --- a/archive/2020/divisions/lia.html +++ b/archive/2020/divisions/lia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/lra.html b/archive/2020/divisions/lra.html index 3b386238..c1c1c5c9 100644 --- a/archive/2020/divisions/lra.html +++ b/archive/2020/divisions/lra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/nia.html b/archive/2020/divisions/nia.html index 75936bdc..c3c85875 100644 --- a/archive/2020/divisions/nia.html +++ b/archive/2020/divisions/nia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/nra.html b/archive/2020/divisions/nra.html index 8ea4e0b0..1611db93 100644 --- a/archive/2020/divisions/nra.html +++ b/archive/2020/divisions/nra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-abv.html b/archive/2020/divisions/qf-abv.html index 52f42ea0..96847bb1 100644 --- a/archive/2020/divisions/qf-abv.html +++ b/archive/2020/divisions/qf-abv.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-abvfp.html b/archive/2020/divisions/qf-abvfp.html index b69b2485..05a66196 100644 --- a/archive/2020/divisions/qf-abvfp.html +++ b/archive/2020/divisions/qf-abvfp.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-abvfplra.html b/archive/2020/divisions/qf-abvfplra.html index d24c3332..e2aa2c30 100644 --- a/archive/2020/divisions/qf-abvfplra.html +++ b/archive/2020/divisions/qf-abvfplra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/qf-alia.html b/archive/2020/divisions/qf-alia.html index 21eedaae..84ce7dd9 100644 --- a/archive/2020/divisions/qf-alia.html +++ b/archive/2020/divisions/qf-alia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-ania.html b/archive/2020/divisions/qf-ania.html index 9ff4a19b..e200929b 100644 --- a/archive/2020/divisions/qf-ania.html +++ b/archive/2020/divisions/qf-ania.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -96,7 +96,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/qf-aufbv.html b/archive/2020/divisions/qf-aufbv.html index 720cc2fa..d231471e 100644 --- a/archive/2020/divisions/qf-aufbv.html +++ b/archive/2020/divisions/qf-aufbv.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-aufbvlia.html b/archive/2020/divisions/qf-aufbvlia.html index 01da8d71..507d138b 100644 --- a/archive/2020/divisions/qf-aufbvlia.html +++ b/archive/2020/divisions/qf-aufbvlia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/qf-aufbvnia.html b/archive/2020/divisions/qf-aufbvnia.html index 99453257..eeed66e9 100644 --- a/archive/2020/divisions/qf-aufbvnia.html +++ b/archive/2020/divisions/qf-aufbvnia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/qf-auflia.html b/archive/2020/divisions/qf-auflia.html index 70511ccd..9811941d 100644 --- a/archive/2020/divisions/qf-auflia.html +++ b/archive/2020/divisions/qf-auflia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-aufnia.html b/archive/2020/divisions/qf-aufnia.html index 264f9505..cfa91bc8 100644 --- a/archive/2020/divisions/qf-aufnia.html +++ b/archive/2020/divisions/qf-aufnia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/qf-ax.html b/archive/2020/divisions/qf-ax.html index e75f52f5..5473608a 100644 --- a/archive/2020/divisions/qf-ax.html +++ b/archive/2020/divisions/qf-ax.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-bv.html b/archive/2020/divisions/qf-bv.html index 67b2d409..a7520b43 100644 --- a/archive/2020/divisions/qf-bv.html +++ b/archive/2020/divisions/qf-bv.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-bvfp.html b/archive/2020/divisions/qf-bvfp.html index 5a3cd9d6..84d1bf49 100644 --- a/archive/2020/divisions/qf-bvfp.html +++ b/archive/2020/divisions/qf-bvfp.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-bvfplra.html b/archive/2020/divisions/qf-bvfplra.html index 41963481..6d7dae35 100644 --- a/archive/2020/divisions/qf-bvfplra.html +++ b/archive/2020/divisions/qf-bvfplra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/qf-dt.html b/archive/2020/divisions/qf-dt.html index c788b510..b6f0660a 100644 --- a/archive/2020/divisions/qf-dt.html +++ b/archive/2020/divisions/qf-dt.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/qf-fp.html b/archive/2020/divisions/qf-fp.html index 30b7088e..0d42610a 100644 --- a/archive/2020/divisions/qf-fp.html +++ b/archive/2020/divisions/qf-fp.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-fplra.html b/archive/2020/divisions/qf-fplra.html index 08ff94fa..ee8e9b0f 100644 --- a/archive/2020/divisions/qf-fplra.html +++ b/archive/2020/divisions/qf-fplra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/qf-idl.html b/archive/2020/divisions/qf-idl.html index 05f9333d..0eeb0f11 100644 --- a/archive/2020/divisions/qf-idl.html +++ b/archive/2020/divisions/qf-idl.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-lia.html b/archive/2020/divisions/qf-lia.html index b4c7336a..c8ede469 100644 --- a/archive/2020/divisions/qf-lia.html +++ b/archive/2020/divisions/qf-lia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-lira.html b/archive/2020/divisions/qf-lira.html index 539ae62c..cd853829 100644 --- a/archive/2020/divisions/qf-lira.html +++ b/archive/2020/divisions/qf-lira.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-lra.html b/archive/2020/divisions/qf-lra.html index 22536d65..1eda3e21 100644 --- a/archive/2020/divisions/qf-lra.html +++ b/archive/2020/divisions/qf-lra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-nia.html b/archive/2020/divisions/qf-nia.html index 6f5e6397..5ba981da 100644 --- a/archive/2020/divisions/qf-nia.html +++ b/archive/2020/divisions/qf-nia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/qf-nira.html b/archive/2020/divisions/qf-nira.html index 7d90e97b..9e0c935c 100644 --- a/archive/2020/divisions/qf-nira.html +++ b/archive/2020/divisions/qf-nira.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/qf-nra.html b/archive/2020/divisions/qf-nra.html index 636747e8..ee4af8b6 100644 --- a/archive/2020/divisions/qf-nra.html +++ b/archive/2020/divisions/qf-nra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/qf-rdl.html b/archive/2020/divisions/qf-rdl.html index 460a73a5..89ba6610 100644 --- a/archive/2020/divisions/qf-rdl.html +++ b/archive/2020/divisions/qf-rdl.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-s.html b/archive/2020/divisions/qf-s.html index d754bf0e..42927600 100644 --- a/archive/2020/divisions/qf-s.html +++ b/archive/2020/divisions/qf-s.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/qf-slia.html b/archive/2020/divisions/qf-slia.html index 0d3bcf20..641c47b9 100644 --- a/archive/2020/divisions/qf-slia.html +++ b/archive/2020/divisions/qf-slia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/qf-uf.html b/archive/2020/divisions/qf-uf.html index ad8e41d4..dd47ef0b 100644 --- a/archive/2020/divisions/qf-uf.html +++ b/archive/2020/divisions/qf-uf.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-ufbv.html b/archive/2020/divisions/qf-ufbv.html index 07dc40ab..e37b0ad1 100644 --- a/archive/2020/divisions/qf-ufbv.html +++ b/archive/2020/divisions/qf-ufbv.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-ufbvlia.html b/archive/2020/divisions/qf-ufbvlia.html index 461d8c42..be5e8ee9 100644 --- a/archive/2020/divisions/qf-ufbvlia.html +++ b/archive/2020/divisions/qf-ufbvlia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/qf-uffp.html b/archive/2020/divisions/qf-uffp.html index 623b356a..1527bee3 100644 --- a/archive/2020/divisions/qf-uffp.html +++ b/archive/2020/divisions/qf-uffp.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-ufidl.html b/archive/2020/divisions/qf-ufidl.html index ef7ef8db..0ca01ea0 100644 --- a/archive/2020/divisions/qf-ufidl.html +++ b/archive/2020/divisions/qf-ufidl.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-uflia.html b/archive/2020/divisions/qf-uflia.html index 819204bc..c4853486 100644 --- a/archive/2020/divisions/qf-uflia.html +++ b/archive/2020/divisions/qf-uflia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-uflra.html b/archive/2020/divisions/qf-uflra.html index 8276d2e6..20a0075f 100644 --- a/archive/2020/divisions/qf-uflra.html +++ b/archive/2020/divisions/qf-uflra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/qf-ufnia.html b/archive/2020/divisions/qf-ufnia.html index ee3fcc42..a415474c 100644 --- a/archive/2020/divisions/qf-ufnia.html +++ b/archive/2020/divisions/qf-ufnia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/qf-ufnra.html b/archive/2020/divisions/qf-ufnra.html index a485cee3..5ff9f7f1 100644 --- a/archive/2020/divisions/qf-ufnra.html +++ b/archive/2020/divisions/qf-ufnra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -95,7 +95,6 @@

    Notes

    - + - diff --git a/archive/2020/divisions/uf.html b/archive/2020/divisions/uf.html index 37705ec3..6e9eb88c 100644 --- a/archive/2020/divisions/uf.html +++ b/archive/2020/divisions/uf.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/ufbv.html b/archive/2020/divisions/ufbv.html index 8aeb0122..8afcf8e9 100644 --- a/archive/2020/divisions/ufbv.html +++ b/archive/2020/divisions/ufbv.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/ufdt.html b/archive/2020/divisions/ufdt.html index aca6ce72..28d08fb7 100644 --- a/archive/2020/divisions/ufdt.html +++ b/archive/2020/divisions/ufdt.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/ufdtlia.html b/archive/2020/divisions/ufdtlia.html index 49d1f334..40c3c67f 100644 --- a/archive/2020/divisions/ufdtlia.html +++ b/archive/2020/divisions/ufdtlia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/ufdtlira.html b/archive/2020/divisions/ufdtlira.html index d919939c..6f7f3025 100644 --- a/archive/2020/divisions/ufdtlira.html +++ b/archive/2020/divisions/ufdtlira.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/ufdtnia.html b/archive/2020/divisions/ufdtnia.html index c22e6dfc..d1e85567 100644 --- a/archive/2020/divisions/ufdtnia.html +++ b/archive/2020/divisions/ufdtnia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/ufdtnira.html b/archive/2020/divisions/ufdtnira.html index 47ef77f4..5df615f9 100644 --- a/archive/2020/divisions/ufdtnira.html +++ b/archive/2020/divisions/ufdtnira.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/uffpdtlira.html b/archive/2020/divisions/uffpdtlira.html index 197e61ca..5fc18825 100644 --- a/archive/2020/divisions/uffpdtlira.html +++ b/archive/2020/divisions/uffpdtlira.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/uffpdtnira.html b/archive/2020/divisions/uffpdtnira.html index 2a21e1b3..47624d23 100644 --- a/archive/2020/divisions/uffpdtnira.html +++ b/archive/2020/divisions/uffpdtnira.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/ufidl.html b/archive/2020/divisions/ufidl.html index 66672b98..6845d7d0 100644 --- a/archive/2020/divisions/ufidl.html +++ b/archive/2020/divisions/ufidl.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/uflia.html b/archive/2020/divisions/uflia.html index dbeceec1..7900eb52 100644 --- a/archive/2020/divisions/uflia.html +++ b/archive/2020/divisions/uflia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/uflra.html b/archive/2020/divisions/uflra.html index 9b495a2e..0c46d132 100644 --- a/archive/2020/divisions/uflra.html +++ b/archive/2020/divisions/uflra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/ufnia.html b/archive/2020/divisions/ufnia.html index ba0f328d..3461e5c0 100644 --- a/archive/2020/divisions/ufnia.html +++ b/archive/2020/divisions/ufnia.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

    - + - diff --git a/archive/2020/divisions/ufnra.html b/archive/2020/divisions/ufnra.html index 8939edeb..e217621c 100644 --- a/archive/2020/divisions/ufnra.html +++ b/archive/2020/divisions/ufnra.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -91,7 +91,6 @@

    Tracks

diff --git a/archive/2020/index.html b/archive/2020/index.html
index 81456e38..30cb5e6f 100644
@@ -34,7 +34,7 @@ (site header; markup-only) · @@ -55,7 +55,7 @@ (navigation; markup-only) · @@ -141,7 +141,6 @@ Acknowledgment

diff --git a/archive/2020/news.html b/archive/2020/news.html
index bf9e937c..f5280d74 100644
@@ -34,7 +34,7 @@ (site header; markup-only) · @@ -55,7 +55,7 @@ (navigation; markup-only) · @@ -92,7 +92,6 @@

Competiti [heading truncated in source]
diff --git a/archive/2020/news/2020-04-03.html b/archive/2020/news/2020-04-03.html
index ec155c2b..1ae9c7ad 100644
@@ -35,7 +35,7 @@ · @@ -56,7 +56,7 @@ · @@ -81,7 +81,6 @@ SMT-COMP 2020 rules draft available
diff --git a/archive/2020/news/2020-04-20.html b/archive/2020/news/2020-04-20.html
index 2ec606bd..a6270c8c 100644
@@ -35,7 +35,7 @@ · @@ -56,7 +56,7 @@ · @@ -81,7 +81,6 @@ SMT-COMP 2020 final version of tools available
diff --git a/archive/2020/news/2020-05-04.html b/archive/2020/news/2020-05-04.html
index 65e75d36..5826c7ef 100644
@@ -35,7 +35,7 @@ · @@ -56,7 +56,7 @@ · @@ -81,7 +81,6 @@ SMT-COMP Solver registration
diff --git a/archive/2020/news/2020-05-06.html b/archive/2020/news/2020-05-06.html
index b766fca0..96fb02ec 100644
@@ -35,7 +35,7 @@ · @@ -56,7 +56,7 @@ · @@ -81,7 +81,6 @@ SMT-COMP Competing Solvers
diff --git a/archive/2020/news/2020-05-13.html b/archive/2020/news/2020-05-13.html
index ad8fc483..613c9db3 100644
@@ -35,7 +35,7 @@ · @@ -56,7 +56,7 @@ · @@ -86,7 +86,6 @@ SMT-COMP Final Solver Deadline Extension
diff --git a/archive/2020/news/2020-05-26.html b/archive/2020/news/2020-05-26.html
index 1cb31731..77b7345b 100644
@@ -35,7 +35,7 @@ · @@ -56,7 +56,7 @@ · @@ -84,7 +84,6 @@ 24 h Extension to Final Solver Deadline

diff --git a/archive/2020/news/2020-05-29.html b/archive/2020/news/2020-05-29.html
index b973fbb1..07d3c63c 100644
@@ -35,7 +35,7 @@ · @@ -56,7 +56,7 @@ (site header and navigation; markup-only)
@@ -92,8 +92,8 @@ Benchmark List published and Jobs started.
(paragraph rewrapped; the visible text is unchanged)
 Please note that the summary in StarExec may put benchmarks in the wrong
 category. For example, solved incremental benchmarks are put into the
 unknown category. The final evaluation will use the job information that
 can be downloaded from the bottom of the page.
@@ -111,7 +111,6 @@ Benchmark List published and Jobs started.
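
(The check the note above describes can be reproduced offline once the job information has been downloaded. A minimal sketch in Python follows; the "result" and "expected" column names are assumptions made for illustration, not the documented StarExec export schema.)

    # Sketch only: recompute solved/unknown counts from a downloaded StarExec
    # job CSV instead of trusting the on-site summary. Column names "result"
    # and "expected" are assumed, not taken from a documented schema.
    import csv
    from collections import Counter

    def categorize(job_csv_path):
        counts = Counter()
        with open(job_csv_path, newline="") as f:
            for row in csv.DictReader(f):
                result = (row.get("result") or "").strip().lower()
                expected = (row.get("expected") or "").strip().lower()
                if result in ("sat", "unsat"):
                    if expected in ("sat", "unsat") and result != expected:
                        counts["disagreement"] += 1  # answer contradicts expected status
                    else:
                        counts["solved"] += 1
                else:
                    counts["unknown"] += 1  # timeouts, memouts, "unknown" answers
        return counts

    if __name__ == "__main__":
        print(categorize("job_info.csv"))  # hypothetical file name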

diff --git a/archive/2020/news/2020-07-05.html b/archive/2020/news/2020-07-05.html
index 1b0dbac4..949907c9 100644
@@ -35,7 +35,7 @@ · @@ -56,7 +56,7 @@ · @@ -88,7 +88,6 @@ Competition Results and Presentation July 6 at 14:30 CEST
diff --git a/archive/2020/news/2020-07-31.html b/archive/2020/news/2020-07-31.html
index c2a10466..c4b7df39 100644
@@ -35,7 +35,7 @@ · @@ -56,7 +56,7 @@ · @@ -83,7 +83,6 @@ Competition Results Available

diff --git a/archive/2020/participants.html b/archive/2020/participants.html
index 60616067..f0b86bfb 100644
@@ -34,7 +34,7 @@ (site header; markup-only) · @@ -55,7 +55,7 @@ (navigation; markup-only) · @@ -5839,7 +5839,6 @@ UFNRA

    - + - diff --git a/archive/2020/participants/2018-boolector-incremental.html b/archive/2020/participants/2018-boolector-incremental.html index 332d7b2e..d3d7134f 100644 --- a/archive/2020/participants/2018-boolector-incremental.html +++ b/archive/2020/participants/2018-boolector-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -132,8 +132,6 @@

    2018-Boolector (incremental)

    - + - - diff --git a/archive/2020/participants/2018-cvc4-incremental.html b/archive/2020/participants/2018-cvc4-incremental.html index 5643ed73..a5d1f14c 100644 --- a/archive/2020/participants/2018-cvc4-incremental.html +++ b/archive/2020/participants/2018-cvc4-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -132,8 +132,6 @@

    2018-CVC4 (incremental)

    - + - - diff --git a/archive/2020/participants/2018-cvc4-unsat-core.html b/archive/2020/participants/2018-cvc4-unsat-core.html index 55a59cc9..356ff0a0 100644 --- a/archive/2020/participants/2018-cvc4-unsat-core.html +++ b/archive/2020/participants/2018-cvc4-unsat-core.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -132,8 +132,6 @@

    2018-CVC4 (unsat core)

    - + - - diff --git a/archive/2020/participants/2018-cvc4.html b/archive/2020/participants/2018-cvc4.html index 2f859783..21e1b4db 100644 --- a/archive/2020/participants/2018-cvc4.html +++ b/archive/2020/participants/2018-cvc4.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -132,8 +132,6 @@

    2018-CVC4

    - + - - diff --git a/archive/2020/participants/2018-mathsat-incremental.html b/archive/2020/participants/2018-mathsat-incremental.html index faae4ac7..83e7a0b1 100644 --- a/archive/2020/participants/2018-mathsat-incremental.html +++ b/archive/2020/participants/2018-mathsat-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -132,8 +132,6 @@

    2018-MathSAT (incremental)

    - + - - diff --git a/archive/2020/participants/2018-smtrat-rat.html b/archive/2020/participants/2018-smtrat-rat.html index acd61eb2..4089b08e 100644 --- a/archive/2020/participants/2018-smtrat-rat.html +++ b/archive/2020/participants/2018-smtrat-rat.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -132,8 +132,6 @@

    2018-SMTRAT-Rat

    - + - - diff --git a/archive/2020/participants/2018-vampire.html b/archive/2020/participants/2018-vampire.html index 75a3ef72..68290605 100644 --- a/archive/2020/participants/2018-vampire.html +++ b/archive/2020/participants/2018-vampire.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -132,8 +132,6 @@

    2018-Vampire

    - + - - diff --git a/archive/2020/participants/2018-yices-incremental.html b/archive/2020/participants/2018-yices-incremental.html index 0f29da9f..df04a832 100644 --- a/archive/2020/participants/2018-yices-incremental.html +++ b/archive/2020/participants/2018-yices-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -132,8 +132,6 @@

    2018-Yices (incremental)

    - + - - diff --git a/archive/2020/participants/2018-yices-unsat-core.html b/archive/2020/participants/2018-yices-unsat-core.html index 97c19a92..540fc5af 100644 --- a/archive/2020/participants/2018-yices-unsat-core.html +++ b/archive/2020/participants/2018-yices-unsat-core.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -132,8 +132,6 @@

    2018-Yices (unsat core)

    - + - - diff --git a/archive/2020/participants/2018-yices.html b/archive/2020/participants/2018-yices.html index 64598a75..5d2882d3 100644 --- a/archive/2020/participants/2018-yices.html +++ b/archive/2020/participants/2018-yices.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -132,8 +132,6 @@

    2018-Yices

    - + - - diff --git a/archive/2020/participants/2018-z3-incremental.html b/archive/2020/participants/2018-z3-incremental.html index 1aa2e1d3..831c27e1 100644 --- a/archive/2020/participants/2018-z3-incremental.html +++ b/archive/2020/participants/2018-z3-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -132,8 +132,6 @@

    2018-Z3 (incremental)

diff --git a/archive/2020/participants/2018-z3-unsat-core.html b/archive/2020/participants/2018-z3-unsat-core.html
index dc995084..e38a6775 100644
 2018-Z3 (unsat core)
diff --git a/archive/2020/participants/2018-z3.html b/archive/2020/participants/2018-z3.html
index 1f777f06..cd2a94dc 100644
 2018-Z3
diff --git a/archive/2020/participants/2019-boolector.html b/archive/2020/participants/2019-boolector.html
index 17fb9132..2d5d85ae 100644
 2019-Boolector
diff --git a/archive/2020/participants/2019-cvc4-inc.html b/archive/2020/participants/2019-cvc4-inc.html
index eb33973f..407d9af7 100644
 2019-CVC4-inc
diff --git a/archive/2020/participants/2019-cvc4-uc.html b/archive/2020/participants/2019-cvc4-uc.html
index 309dfb69..a899b38f 100644
 2019-CVC4-uc
diff --git a/archive/2020/participants/2019-cvc4.html b/archive/2020/participants/2019-cvc4.html
index b305ed0a..17f44193 100644
 2019-CVC4
diff --git a/archive/2020/participants/2019-mathsat-default.html b/archive/2020/participants/2019-mathsat-default.html
index 8ee05c77..b83ede54 100644
 2019-MathSAT-default
diff --git a/archive/2020/participants/2019-mathsat-na-ext.html b/archive/2020/participants/2019-mathsat-na-ext.html
index 632eabdf..a0651203 100644
 2019-MathSAT-na-ext
diff --git a/archive/2020/participants/2019-par4.html b/archive/2020/participants/2019-par4.html
index 2d347b60..166d8cc8 100644
 2019-Par4
diff --git a/archive/2020/participants/2019-poolector.html b/archive/2020/participants/2019-poolector.html
index de14a31a..23f86b18 100644
 2019-Poolector
diff --git a/archive/2020/participants/2019-smtinterpol.html b/archive/2020/participants/2019-smtinterpol.html
index 676f652f..fe3b1a99 100644
 2019-SMTInterpol
diff --git a/archive/2020/participants/2019-spass-satt.html b/archive/2020/participants/2019-spass-satt.html
index cf33b913..e5eb0b8e 100644
 2019-SPASS-SATT
diff --git a/archive/2020/participants/2019-vampire.html b/archive/2020/participants/2019-vampire.html
index cff64f70..d513dc0d 100644
 2019-Vampire
diff --git a/archive/2020/participants/2019-yices-2-6-2-incremental.html b/archive/2020/participants/2019-yices-2-6-2-incremental.html
index 096f1f75..fdcf318e 100644
 2019-Yices 2.6.2 Incremental
diff --git a/archive/2020/participants/2019-yices-2-6-2.html b/archive/2020/participants/2019-yices-2-6-2.html
index d07df0fb..2d36fd20 100644
 2019-Yices 2.6.2
diff --git a/archive/2020/participants/2019-z3.html b/archive/2020/participants/2019-z3.html
index 9f55b1b9..8740ca10 100644
 2019-Z3
diff --git a/archive/2020/participants/alt-ergo.html b/archive/2020/participants/alt-ergo.html
index fa468041..999c4f69 100644
 Alt-Ergo
diff --git a/archive/2020/participants/aprove.html b/archive/2020/participants/aprove.html
index f0b0ae3c..0a74c643 100644
 AProVE
diff --git a/archive/2020/participants/bitwuzla-fixed.html b/archive/2020/participants/bitwuzla-fixed.html
index 8e74b300..d8eb4faf 100644
 Bitwuzla-fixed
diff --git a/archive/2020/participants/bitwuzla.html b/archive/2020/participants/bitwuzla.html
index f92502a6..6cdcede0 100644
 Bitwuzla
diff --git a/archive/2020/participants/colibri.html b/archive/2020/participants/colibri.html
index ad8ee7a3..9d1765b8 100644
 COLIBRI
diff --git a/archive/2020/participants/cvc4-inc.html b/archive/2020/participants/cvc4-inc.html
index c4bb053b..25ef3072 100644
 CVC4-inc
diff --git a/archive/2020/participants/cvc4-mv.html b/archive/2020/participants/cvc4-mv.html
index 47b5513c..2c7c225e 100644
 CVC4-mv
diff --git a/archive/2020/participants/cvc4-uc.html b/archive/2020/participants/cvc4-uc.html
index fabc4c18..0aae9825 100644
 CVC4-uc
diff --git a/archive/2020/participants/cvc4.html b/archive/2020/participants/cvc4.html
index a89ad62a..c8e63ea9 100644
 CVC4
diff --git a/archive/2020/participants/lazybv2int.html b/archive/2020/participants/lazybv2int.html
index de7968d2..db5f201a 100644
 LazyBV2Int
diff --git a/archive/2020/participants/mathsat5-mv.html b/archive/2020/participants/mathsat5-mv.html
index 8d52c7c1..01ffabd9 100644
 MathSAT5-mv
diff --git a/archive/2020/participants/mathsat5.html b/archive/2020/participants/mathsat5.html
index fb3d17d3..a5ee44a2 100644
 MathSAT5
diff --git a/archive/2020/participants/minkeyrink-fixed.html b/archive/2020/participants/minkeyrink-fixed.html
index ef6ec017..a7606493 100644
 MinkeyRink-fixed
diff --git a/archive/2020/participants/minkeyrink.html b/archive/2020/participants/minkeyrink.html
index 2fa82322..994b903b 100644
 MinkeyRink
diff --git a/archive/2020/participants/opensmt.html b/archive/2020/participants/opensmt.html
index 228eac86..5f291215 100644
 OpenSMT
diff --git a/archive/2020/participants/smt-rat-calc.html b/archive/2020/participants/smt-rat-calc.html
index 0f40cc45..0eeba753 100644
 SMT-RAT-CAlC
diff --git a/archive/2020/participants/smt-rat-mcsat.html b/archive/2020/participants/smt-rat-mcsat.html
index e987d1db..1e8e91f2 100644
 SMT-RAT-MCSAT
diff --git a/archive/2020/participants/smt-rat.html b/archive/2020/participants/smt-rat.html
index 0e181a84..eada355d 100644
 SMT-RAT
diff --git a/archive/2020/participants/smtinterpol-fixed.html b/archive/2020/participants/smtinterpol-fixed.html
index b587864d..49d7e0d3 100644
 SMTInterpol-fixed
diff --git a/archive/2020/participants/smtinterpol.html b/archive/2020/participants/smtinterpol.html
index 7e328667..0814129f 100644
 SMTInterpol
diff --git a/archive/2020/participants/stp-cms.html b/archive/2020/participants/stp-cms.html
index 3f58a246..efef1040 100644
 STP + CMS
diff --git a/archive/2020/participants/stp-mergesat.html b/archive/2020/participants/stp-mergesat.html
index b451a3d5..46bf64b1 100644
 STP + MergeSAT
diff --git a/archive/2020/participants/ultimateeliminator-mathsat.html b/archive/2020/participants/ultimateeliminator-mathsat.html
index 416b8e9c..eba65745 100644
 UltimateEliminator+MathSAT
diff --git a/archive/2020/participants/vampire.html b/archive/2020/participants/vampire.html
index 33e3ca8d..d753161d 100644
 Vampire
diff --git a/archive/2020/participants/verit-rasat-redlog.html b/archive/2020/participants/verit-rasat-redlog.html
index 3d093116..ce8effb5 100644
 veriT+raSAT+Redlog
diff --git a/archive/2020/participants/verit-vite.html b/archive/2020/participants/verit-vite.html
index 70647884..f0252af7 100644
 veriT+vite
diff --git a/archive/2020/participants/verit.html b/archive/2020/participants/verit.html
index b25da3dd..3e275cd8 100644
 veriT
diff --git a/archive/2020/participants/yices2-fixed-incremental.html b/archive/2020/participants/yices2-fixed-incremental.html
index 51c9dc12..30e79d7c 100644
 Yices2-fixed incremental
diff --git a/archive/2020/participants/yices2-fixed-model-validation.html b/archive/2020/participants/yices2-fixed-model-validation.html
index e6ff633b..f9672982 100644
 Yices2-fixed Model Validation
diff --git a/archive/2020/participants/yices2-fixed.html b/archive/2020/participants/yices2-fixed.html
index 26f74ce5..13c85b7f 100644
 Yices2-fixed
diff --git a/archive/2020/participants/yices2-incremental.html b/archive/2020/participants/yices2-incremental.html
index 8b31700e..a1637feb 100644
 Yices2 incremental
diff --git a/archive/2020/participants/yices2-model-validation.html b/archive/2020/participants/yices2-model-validation.html
index 96e41a98..cd094841 100644
 Yices2 Model Validation
diff --git a/archive/2020/participants/yices2.html b/archive/2020/participants/yices2.html
index 65cf4fb8..bca7f851 100644
 Yices2
diff --git a/archive/2020/participants/z3.html b/archive/2020/participants/z3.html
index 0886832f..1c82a95c 100644
 z3
diff --git a/archive/2020/participants/z3str4.html b/archive/2020/participants/z3str4.html
index 93c2cffa..d8431e03 100644
 Z3str4
diff --git a/archive/2020/results.html b/archive/2020/results.html
index 57b64f59..38b46c2d 100644

 Disagreements
 Divisions
diff --git a/archive/2020/results/abvfp-single-query.html b/archive/2020/results/abvfp-single-query.html
index bbdacc19..54063d4e 100644
 ABVFP (Single Query Track)
 Competition results for the ABVFP division in the Single Query Track.
 Sequential Performance: CVC4 / Parallel Performance: CVC4 / SAT Performance (parallel): CVC4 / UNSAT Performance (parallel): CVC4 / 24s Performance (parallel): CVC4
 CVC4 0 15
 2018-CVC4n 0 15
 UltimateEliminator+MathSAT 0 10
diff --git a/archive/2020/results/abvfplra-single-query.html b/archive/2020/results/abvfplra-single-query.html
index 7f1ec9e8..57a78fef 100644
 ABVFPLRA (Single Query Track)
 Competition results for the ABVFPLRA division in the Single Query Track.
 Sequential Performance: CVC4 / Parallel Performance: CVC4 / SAT Performance (parallel): CVC4 / UNSAT Performance (parallel): CVC4 / 24s Performance (parallel): CVC4
 CVC4 0 18
 UltimateEliminator+MathSAT 0 0
diff --git a/archive/2020/results/alia-single-query.html b/archive/2020/results/alia-single-query.html
index 68995796..e4554a97 100644
 ALIA (Single Query Track)
 Competition results for the ALIA division in the Single Query Track.
 Sequential Performance: SMTInterpol / Parallel Performance: SMTInterpol / SAT Performance (parallel): SMTInterpol / UNSAT Performance (parallel): CVC4 / 24s Performance (parallel): SMTInterpol
 2018-Z3n 0 19
 z3n 0 19
 SMTInterpol-fixedn 0 19
 SMTInterpol 0 19
 CVC4 0 18
 Alt-Ergo 0 18
 Vampire 0 15
 veriT 0 4
 veriT+viten 0 4
 UltimateEliminator+MathSAT 0 0
diff --git a/archive/2020/results/ania-incremental.html b/archive/2020/results/ania-incremental.html
index ce068c23..875ab400 100644
 ANIA (Incremental Track)
 Competition results for the ANIA division in the Incremental Track.
 Parallel Performance: CVC4-inc
 CVC4-inc
 2018-CVC4 (incremental)n
 UltimateEliminator+MathSAT
diff --git a/archive/2020/results/aufbvdtlia-single-query.html b/archive/2020/results/aufbvdtlia-single-query.html
index 77080537..ffef2e51 100644
 AUFBVDTLIA (Single Query Track)
 Competition results for the AUFBVDTLIA division in the Single Query Track.
 Sequential Performance: CVC4 / Parallel Performance: CVC4 / SAT Performance (parallel): CVC4 / UNSAT Performance (parallel): CVC4 / 24s Performance (parallel): CVC4
 CVC4 0 374
 UltimateEliminator+MathSAT 0 0
diff --git a/archive/2020/results/aufbvdtlia-unsat-core.html b/archive/2020/results/aufbvdtlia-unsat-core.html
index 29d1d349..ecdc395d 100644
 AUFBVDTLIA (Unsat Core Track)
 Competition results for the AUFBVDTLIA division in the Unsat Core Track.
 Sequential Performance: CVC4-uc / Parallel Performance: CVC4-uc
 CVC4-uc 0 2
 UltimateEliminator+MathSAT 0 0
diff --git a/archive/2020/results/aufdtlia-single-query.html b/archive/2020/results/aufdtlia-single-query.html
index c1f1b852..556e6498 100644
 AUFDTLIA (Single Query Track)
 Competition results for the AUFDTLIA division in the Single Query Track.
 Sequential Performance: CVC4 / Parallel Performance: CVC4 / SAT Performance (parallel): CVC4 / UNSAT Performance (parallel): CVC4 / 24s Performance (parallel): CVC4
 2018-CVC4n 0 147
 CVC4 0 147
 Vampire 0 54
 Alt-Ergo 0 53
 UltimateEliminator+MathSAT 0 0
diff --git a/archive/2020/results/aufdtlira-single-query.html b/archive/2020/results/aufdtlira-single-query.html
index 1d0629a9..1a673393 100644
 AUFDTLIRA (Single Query Track)
 Competition results for the AUFDTLIRA division in the Single Query Track.
 Sequential Performance: CVC4 / Parallel Performance: CVC4 / SAT Performance (parallel): — / UNSAT Performance (parallel): CVC4 / 24s Performance (parallel): CVC4
 CVC4 0 3958
 Alt-Ergo 0 3901
 Vampire 0 3855
 UltimateEliminator+MathSAT 0 0
diff --git a/archive/2020/results/aufdtlira-unsat-core.html b/archive/2020/results/aufdtlira-unsat-core.html
index 2966ef2a..9d4124f3 100644
 AUFDTLIRA (Unsat Core Track)
 Competition results for the AUFDTLIRA division in the Unsat Core Track.
 Sequential Performance: CVC4-uc / Parallel Performance: CVC4-uc
 CVC4-uc 0 155822
 UltimateEliminator+MathSAT 0 0
diff --git a/archive/2020/results/aufdtnira-single-query.html b/archive/2020/results/aufdtnira-single-query.html
index 9c07bb14..3230125e 100644
 AUFDTNIRA (Single Query Track)
 Competition results for the AUFDTNIRA division in the Single Query Track.
 Sequential Performance: Vampire / Parallel Performance: Vampire / SAT Performance (parallel): — / UNSAT Performance (parallel): Vampire / 24s Performance (parallel): CVC4
 Vampire 0 222
 Alt-Ergo 0 214
 CVC4 0 208
 UltimateEliminator+MathSAT 0 0
diff --git a/archive/2020/results/aufdtnira-unsat-core.html b/archive/2020/results/aufdtnira-unsat-core.html
index 6dbcda54..3885be82 100644
 AUFDTNIRA (Unsat Core Track)
 Competition results for the AUFDTNIRA division in the Unsat Core Track.
 Sequential Performance: CVC4-uc / Parallel Performance: CVC4-uc
 CVC4-uc 0 1948
 UltimateEliminator+MathSAT 0 0
diff --git a/archive/2020/results/auffpdtlira-single-query.html b/archive/2020/results/auffpdtlira-single-query.html
index 153487e1..6275b553 100644
 AUFFPDTLIRA (Single Query Track)
 Competition results for the AUFFPDTLIRA division in the Single Query Track.
 Sequential Performance: CVC4 / Parallel Performance: CVC4 / SAT Performance (parallel): — / UNSAT Performance (parallel): CVC4 / 24s Performance (parallel): CVC4
 CVC4 0 115
 UltimateEliminator+MathSAT 0 0
diff --git a/archive/2020/results/auffpdtlira-unsat-core.html b/archive/2020/results/auffpdtlira-unsat-core.html
index 7892b3d2..c82b1f0c 100644
 AUFFPDTLIRA (Unsat Core Track)
 Competition results for the AUFFPDTLIRA division in the Unsat Core Track.
 Sequential Performance: CVC4-uc / Parallel Performance: CVC4-uc
 CVC4-uc 0 3667
 UltimateEliminator+MathSAT 0 0
    - + - diff --git a/archive/2020/results/auflia-single-query.html b/archive/2020/results/auflia-single-query.html index 969ab307..3ddba574 100644 --- a/archive/2020/results/auflia-single-query.html +++ b/archive/2020/results/auflia-single-query.html @@ -35,7 +35,7 @@

[hunks elided: markup/whitespace-only edits to the AUFLIA (Single Query Track) results page; the visible text, including the winner summary (Vampire on sequential, parallel, UNSAT, and 24s performance; CVC4 on SAT performance) and the per-solver score tables, is identical on both sides of each hunk]

diff --git a/archive/2020/results/auflia-unsat-core.html b/archive/2020/results/auflia-unsat-core.html
index bb3ecfe0..f74cc267 100644
--- a/archive/2020/results/auflia-unsat-core.html
+++ b/archive/2020/results/auflia-unsat-core.html

[hunks elided: markup/whitespace-only edits to the AUFLIA (Unsat Core Track) results page; the visible text, including the winner summary (CVC4-uc, sequential and parallel) and the per-solver score tables, is identical on both sides of each hunk]

diff --git a/archive/2020/results/auflira-single-query.html b/archive/2020/results/auflira-single-query.html
index 5651d7a3..c2ebe3b9 100644
--- a/archive/2020/results/auflira-single-query.html
+++ b/archive/2020/results/auflira-single-query.html

[hunks elided: markup/whitespace-only edits to the AUFLIRA (Single Query Track) results page; the visible text, including the winner summary (CVC4 on sequential performance; Vampire on parallel, UNSAT, and 24s performance; no SAT winner) and the per-solver score tables, is identical on both sides of each hunk]

diff --git a/archive/2020/results/auflira-unsat-core.html b/archive/2020/results/auflira-unsat-core.html
index 2dc5fa1a..07b05bca 100644
--- a/archive/2020/results/auflira-unsat-core.html
+++ b/archive/2020/results/auflira-unsat-core.html

[hunks elided: markup/whitespace-only edits to the AUFLIRA (Unsat Core Track) results page; the visible text, including the winner summary (CVC4-uc, sequential and parallel) and the per-solver score tables, is identical on both sides of each hunk]

diff --git a/archive/2020/results/aufnia-single-query.html b/archive/2020/results/aufnia-single-query.html
index b0fa49f8..e49cf579 100644
--- a/archive/2020/results/aufnia-single-query.html
+++ b/archive/2020/results/aufnia-single-query.html

[hunks elided: markup/whitespace-only edits to the AUFNIA (Single Query Track) results page; the visible text, including the tables showing no benchmarks solved and no winners declared in any scoring category, is identical on both sides of each hunk]

diff --git a/archive/2020/results/aufnia-unsat-core.html b/archive/2020/results/aufnia-unsat-core.html
index 82fcdcb8..45b3c28d 100644
--- a/archive/2020/results/aufnia-unsat-core.html
+++ b/archive/2020/results/aufnia-unsat-core.html

[hunks elided: markup/whitespace-only edits to the AUFNIA (Unsat Core Track) results page; the visible text, including the tables showing no cores solved and no winners declared, is identical on both sides of each hunk]

diff --git a/archive/2020/results/aufnira-incremental.html b/archive/2020/results/aufnira-incremental.html
index ab215b71..d7dbff22 100644
--- a/archive/2020/results/aufnira-incremental.html
+++ b/archive/2020/results/aufnira-incremental.html

[hunks elided: markup/whitespace-only edits to the AUFNIRA (Incremental Track) results page; the visible text, including the winner summary (CVC4-inc, parallel performance) and the per-solver score tables, is identical on both sides of each hunk]

diff --git a/archive/2020/results/aufnira-single-query.html b/archive/2020/results/aufnira-single-query.html
index 0960fe00..a383fedf 100644
--- a/archive/2020/results/aufnira-single-query.html
+++ b/archive/2020/results/aufnira-single-query.html

[hunks elided: markup/whitespace-only edits to the AUFNIRA (Single Query Track) results page; the visible text, including the winner summary (CVC4 on sequential performance; Vampire on parallel, UNSAT, and 24s performance; no SAT winner) and the per-solver score tables, is identical on both sides of each hunk]

diff --git a/archive/2020/results/aufnira-unsat-core.html b/archive/2020/results/aufnira-unsat-core.html
index e244fee5..2cf3cc23 100644
--- a/archive/2020/results/aufnira-unsat-core.html
+++ b/archive/2020/results/aufnira-unsat-core.html

[hunks elided: markup/whitespace-only edits to the AUFNIRA (Unsat Core Track) results page; the visible text, including the winner summary (CVC4-uc, sequential and parallel) and the per-solver score tables, is identical on both sides of each hunk]

diff --git a/archive/2020/results/biggest-lead-incremental.html b/archive/2020/results/biggest-lead-incremental.html
index 99580964..cc8e99c6 100644
--- a/archive/2020/results/biggest-lead-incremental.html
+++ b/archive/2020/results/biggest-lead-incremental.html

[hunks elided: markup/whitespace-only edits to the Biggest Lead (Incremental Track) award page; the visible text, including the winner (CVC4-inc, parallel performance) and the per-division lead-ratio tables, is identical on both sides of each hunk]

diff --git a/archive/2020/results/biggest-lead-model-validation.html b/archive/2020/results/biggest-lead-model-validation.html
index 6277fa6a..465eeda5 100644
--- a/archive/2020/results/biggest-lead-model-validation.html
+++ b/archive/2020/results/biggest-lead-model-validation.html

[hunks elided: markup/whitespace-only edits to the Biggest Lead (Model Validation Track) award page; the visible text, including the winner (Bitwuzla, sequential and parallel) and the lead-ratio tables, is identical on both sides of each hunk]

diff --git a/archive/2020/results/biggest-lead-single-query.html b/archive/2020/results/biggest-lead-single-query.html
index d15ed154..6eeb5d40 100644
--- a/archive/2020/results/biggest-lead-single-query.html
+++ b/archive/2020/results/biggest-lead-single-query.html

[hunks elided: markup/whitespace-only edits to the Biggest Lead (Single Query Track) award page; the visible text, including the winner (CVC4 in all five scoring categories) and the per-division lead-ratio tables, is identical on both sides of each hunk]

diff --git a/archive/2020/results/biggest-lead-unsat-core.html b/archive/2020/results/biggest-lead-unsat-core.html
index dc21f21e..ec62150f 100644
--- a/archive/2020/results/biggest-lead-unsat-core.html
+++ b/archive/2020/results/biggest-lead-unsat-core.html

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -90,35 +90,35 @@

    Winners

    - + - - + + - - - - + + + + - + - - + + - - - - + + + +
    Sequential PerformanceParallel Performance
    - + CVC4-uc - + - + CVC4-uc - +

    Sequential Performance

    @@ -136,7 +136,7 @@

    Sequential Performance

    - + CVC4-uc 244199.0 @@ -150,7 +150,7 @@

    Sequential Performance

    - + CVC4-uc 155823.0 @@ -164,7 +164,7 @@

    Sequential Performance

    - + CVC4-uc 78597.0 @@ -178,7 +178,7 @@

    Sequential Performance

    - + CVC4-uc 61865.0 @@ -192,7 +192,7 @@

    Sequential Performance

    - + CVC4-uc 13142.0 @@ -206,7 +206,7 @@

    Sequential Performance

    - + CVC4-uc 9721.0 @@ -220,7 +220,7 @@

    Sequential Performance

    - + CVC4-uc 5541.0 @@ -234,7 +234,7 @@

    Sequential Performance

    - + CVC4-uc 3668.0 @@ -248,7 +248,7 @@

    Sequential Performance

    - + CVC4-uc 1949.0 @@ -262,7 +262,7 @@

    Sequential Performance

    - + CVC4-uc 49.0 @@ -276,7 +276,7 @@

    Sequential Performance

    - + CVC4-uc 22.0 @@ -290,7 +290,7 @@

    Sequential Performance

    - + CVC4-uc 7.0 @@ -304,7 +304,7 @@

    Sequential Performance

    - + CVC4-uc 5.0 @@ -318,7 +318,7 @@

    Sequential Performance

[garbled continuation of the preceding results page's diff: the remaining Sequential Performance rows and the full Parallel Performance rows, carrying per-division scores for CVC4-uc, Yices2, Bitwuzla, and SMTInterpol. Every hunk swaps one line for another that strips to the same text here (markup-only edits); the final hunk also drops one line. The table data is unchanged context.]
diff --git a/archive/2020/results/bv-incremental.html b/archive/2020/results/bv-incremental.html
index bc32bdd2..d8e16911 100644
--- a/archive/2020/results/bv-incremental.html
+++ b/archive/2020/results/bv-incremental.html
[garbled hunks: BV (Incremental Track) results page. Winner (parallel performance): CVC4-inc; entrants: 2019-Z3n, CVC4-inc, z3n, UltimateEliminator+MathSAT. Markup-only one-line hunks throughout; table data unchanged.]
diff --git a/archive/2020/results/bv-single-query.html b/archive/2020/results/bv-single-query.html
index ff6b88b5..4012d077 100644
--- a/archive/2020/results/bv-single-query.html
+++ b/archive/2020/results/bv-single-query.html
[garbled hunks: BV (Single Query Track) results page. Winner: CVC4 in all five categories (sequential, parallel, SAT, UNSAT, 24s); entrants: 2019-Par4n, CVC4, Bitwuzla-fixedn, z3n, UltimateEliminator+MathSAT, Bitwuzla. Same markup-only hunk pattern as above; the ranking tables are unchanged context.]
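As a reading aid for the archived Single Query tables: the categories order the same runs by different clocks. A minimal sketch of such a ranking, assuming a simplified result record (errors, solved count, CPU time, wall time) rather than the competition's actual schema; the two sample rows are read off the BV table, with the fused digit runs split on a best-effort basis.

    # Minimal sketch, not the official SMT-COMP scoring: rank Single Query
    # results by errors (ascending), solved benchmarks (descending), then
    # time (ascending) -- CPU time for the sequential ranking, wall-clock
    # time for the parallel one.  The field layout is an assumption.
    from dataclasses import dataclass


    @dataclass
    class Result:
        solver: str
        errors: int
        solved: int
        cpu_time: float
        wall_time: float


    def rank(results: list[Result], parallel: bool) -> list[Result]:
        time = (lambda r: r.wall_time) if parallel else (lambda r: r.cpu_time)
        return sorted(results, key=lambda r: (r.errors, -r.solved, time(r)))


    # Two rows from the BV (Single Query Track) table above.
    bv = [
        Result("CVC4", 0, 612, 137860.739, 138917.711),
        Result("Bitwuzla-fixedn", 0, 598, 133286.041, 125513.756),
    ]
    print([r.solver for r in rank(bv, parallel=True)])
    # ['CVC4', 'Bitwuzla-fixedn']: more solved wins regardless of time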
diff --git a/archive/2020/results/bv-unsat-core.html b/archive/2020/results/bv-unsat-core.html
index 4fe688fc..a806d46f 100644
--- a/archive/2020/results/bv-unsat-core.html
+++ b/archive/2020/results/bv-unsat-core.html
[garbled hunks: BV (Unsat Core Track) results page. Winner: CVC4-uc (sequential and parallel); entrants: CVC4-uc, z3n, UltimateEliminator+MathSAT. Markup-only one-line hunks; table data unchanged.]
diff --git a/archive/2020/results/bvfp-incremental.html b/archive/2020/results/bvfp-incremental.html
index 94469956..fd60b442 100644
--- a/archive/2020/results/bvfp-incremental.html
+++ b/archive/2020/results/bvfp-incremental.html
[garbled hunks: BVFP (Incremental Track) results page. Winner (parallel performance): CVC4-inc; entrants: 2019-CVC4-incn, CVC4-inc, UltimateEliminator+MathSAT. Markup-only one-line hunks; table data unchanged.]
diff --git a/archive/2020/results/bvfp-single-query.html b/archive/2020/results/bvfp-single-query.html
index 76619954..1484174a 100644
--- a/archive/2020/results/bvfp-single-query.html
+++ b/archive/2020/results/bvfp-single-query.html
[garbled hunks: BVFP (Single Query Track) results page. Winner: CVC4 in all five categories; entrants: 2019-Z3n, CVC4, UltimateEliminator+MathSAT. Markup-only one-line hunks; ranking tables unchanged.]
diff --git a/archive/2020/results/bvfplra-single-query.html b/archive/2020/results/bvfplra-single-query.html
index 6676541a..0089204c 100644
--- a/archive/2020/results/bvfplra-single-query.html
+++ b/archive/2020/results/bvfplra-single-query.html
[garbled hunks: BVFPLRA (Single Query Track) results page. Winner: CVC4 in all five categories; entrants: CVC4, UltimateEliminator+MathSAT. Markup-only one-line hunks; ranking tables unchanged.]
diff --git a/archive/2020/results/bvfplra-unsat-core.html b/archive/2020/results/bvfplra-unsat-core.html
index 795fd01c..45857612 100644
--- a/archive/2020/results/bvfplra-unsat-core.html
+++ b/archive/2020/results/bvfplra-unsat-core.html
[garbled hunks: BVFPLRA (Unsat Core Track) results page. Winner: CVC4-uc (sequential and parallel); entrants: CVC4-uc, UltimateEliminator+MathSAT. Markup-only one-line hunks; table data unchanged.]
diff --git a/archive/2020/results/fp-single-query.html b/archive/2020/results/fp-single-query.html
index 48610c2d..4e151594 100644
--- a/archive/2020/results/fp-single-query.html
+++ b/archive/2020/results/fp-single-query.html
[garbled hunks: FP (Single Query Track) results page. Winner: CVC4 in all five categories; entrants: 2019-Z3n, z3n, CVC4, UltimateEliminator+MathSAT. Markup-only one-line hunks; ranking tables unchanged.]
diff --git a/archive/2020/results/fplra-single-query.html b/archive/2020/results/fplra-single-query.html
index fdf4ebd0..7e41025d 100644
--- a/archive/2020/results/fplra-single-query.html
+++ b/archive/2020/results/fplra-single-query.html
[garbled hunks: FPLRA (Single Query Track) results page. Winner: CVC4 (sequential, parallel, SAT, 24s; no UNSAT winner is shown); entrants: CVC4, UltimateEliminator+MathSAT. Markup-only one-line hunks; ranking tables unchanged.]
diff --git a/archive/2020/results/largest-contribution-incremental.html b/archive/2020/results/largest-contribution-incremental.html
index 4138166a..f97e1752 100644
--- a/archive/2020/results/largest-contribution-incremental.html
+++ b/archive/2020/results/largest-contribution-incremental.html
[garbled hunks: Largest Contribution (Incremental Track) page. Winner (parallel performance): CVC4-inc; per-division contribution rows for CVC4-inc, Yices2 incremental, Bitwuzla, SMTInterpol. Markup-only one-line hunks; table data unchanged.]
diff --git a/archive/2020/results/largest-contribution-model-validation.html b/archive/2020/results/largest-contribution-model-validation.html
index c1bcaaf0..31d33284 100644
--- a/archive/2020/results/largest-contribution-model-validation.html
+++ b/archive/2020/results/largest-contribution-model-validation.html
[garbled hunks: Largest Contribution (Model Validation Track) page. Winner: Bitwuzla (sequential and parallel); contribution rows for CVC4-mv, Yices2 Model Validation, Bitwuzla. Markup-only one-line hunks; table data unchanged.]
diff --git a/archive/2020/results/largest-contribution-single-query.html b/archive/2020/results/largest-contribution-single-query.html
index 0dc61989..b4c391e7 100644
--- a/archive/2020/results/largest-contribution-single-query.html
+++ b/archive/2020/results/largest-contribution-single-query.html
[garbled hunks: Largest Contribution (Single Query Track) page. Winners: CVC4 (sequential, parallel, SAT) and Yices2 (UNSAT, 24 seconds); five long per-division contribution tables ranking CVC4, Yices2, Vampire, Bitwuzla, COLIBRI, OpenSMT, SMTInterpol, SMT-RAT, veriT, and UltimateEliminator+MathSAT. Markup-only one-line hunks throughout; table data unchanged.]
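For readers of the Largest Contribution tables: the score rewards a solver for benchmarks nobody else solves. A minimal sketch of one common reading of such a score, assuming per-solver solved-benchmark sets; the official definition is the one in the SMT-COMP 2020 rules, and the names below are made up for the example.

    # Illustrative sketch, not the official formula: the contribution of a
    # solver is the relative drop in the virtual best solver's solved set
    # when that solver is removed from the field.
    def virtual_best(solved_by: dict[str, set[str]]) -> set[str]:
        """Union of benchmarks solved by any competing solver."""
        return set().union(*solved_by.values()) if solved_by else set()


    def contribution(solver: str, solved_by: dict[str, set[str]]) -> float:
        full = virtual_best(solved_by)
        rest = virtual_best({s: b for s, b in solved_by.items() if s != solver})
        return (len(full) - len(rest)) / len(full) if full else 0.0


    # Hypothetical division: only "CVC4" solves benchmark "a".
    solved_by = {
        "CVC4": {"a", "b", "c"},
        "Yices2": {"b", "c", "d"},
        "Vampire": {"c"},
    }
    print(contribution("CVC4", solved_by))  # 0.25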
diff --git a/archive/2020/results/largest-contribution-unsat-core.html b/archive/2020/results/largest-contribution-unsat-core.html
index b6ba20ea..da795f65 100644
--- a/archive/2020/results/largest-contribution-unsat-core.html
+++ b/archive/2020/results/largest-contribution-unsat-core.html
[garbled hunks: Largest Contribution (Unsat Core Track) page. Winner: CVC4-uc (sequential and parallel); contribution rows for CVC4-uc, Yices2, Bitwuzla, SMTInterpol. Markup-only one-line hunks; table data unchanged.]
diff --git a/archive/2020/results/lia-incremental.html b/archive/2020/results/lia-incremental.html
index b6f0b023..a3f50539 100644
--- a/archive/2020/results/lia-incremental.html
+++ b/archive/2020/results/lia-incremental.html
[garbled hunks: LIA (Incremental Track) results page. Winner (parallel performance): UltimateEliminator+MathSAT; entrants: z3n, 2018-Z3 (incremental), UltimateEliminator+MathSAT, CVC4-inc, SMTInterpol, SMTInterpol-fixedn. Markup-only one-line hunks; table data unchanged.]
diff --git a/archive/2020/results/lia-single-query.html b/archive/2020/results/lia-single-query.html
index b9e37fd0..44b7bfbb 100644
--- a/archive/2020/results/lia-single-query.html
+++ b/archive/2020/results/lia-single-query.html
[garbled hunks: LIA (Single Query Track) results page. Winner: CVC4 in all five categories; entrants: z3n, 2019-Z3n, CVC4, UltimateEliminator+MathSAT, Vampire, SMTInterpol-fixedn, SMTInterpol, veriT, veriT+viten. Markup-only one-line hunks; ranking tables unchanged.]
diff --git a/archive/2020/results/lia-unsat-core.html b/archive/2020/results/lia-unsat-core.html
index 2c78db76..27c2e529 100644
--- a/archive/2020/results/lia-unsat-core.html
+++ b/archive/2020/results/lia-unsat-core.html
[garbled hunks: LIA (Unsat Core Track) results page. Winner: CVC4-uc (sequential and parallel); entrants: CVC4-uc, z3n, UltimateEliminator+MathSAT, SMTInterpol-fixedn, SMTInterpol. Markup-only one-line hunks; table data unchanged.]
diff --git a/archive/2020/results/lra-incremental.html b/archive/2020/results/lra-incremental.html
index f237b967..c3f978d2 100644
--- a/archive/2020/results/lra-incremental.html
+++ b/archive/2020/results/lra-incremental.html
[garbled hunks: LRA (Incremental Track) results page. Winner (parallel performance): CVC4-inc; entrants: CVC4-inc, UltimateEliminator+MathSAT, z3n, SMTInterpol-fixedn, SMTInterpol. Markup-only one-line hunks; table data unchanged.]
diff --git a/archive/2020/results/lra-single-query.html b/archive/2020/results/lra-single-query.html
index 9c5e7a2e..cb8f94bd 100644
--- a/archive/2020/results/lra-single-query.html
+++ b/archive/2020/results/lra-single-query.html
[garbled hunks: LRA (Single Query Track) results page. Winner: CVC4 in all five categories; entrants: 2019-Par4n, z3n, 2019-Z3n, CVC4, UltimateEliminator+MathSAT, Vampire, SMTInterpol, SMTInterpol-fixedn. Markup-only one-line hunks; ranking tables unchanged.]
diff --git a/archive/2020/results/nia-single-query.html b/archive/2020/results/nia-single-query.html
index abded82b..83b8d6e7 100644
--- a/archive/2020/results/nia-single-query.html
+++ b/archive/2020/results/nia-single-query.html
[garbled hunks: NIA (Single Query Track) results page. Winners: CVC4 (sequential, parallel, SAT, 24s) and Vampire (UNSAT); entrants: 2018-Z3n, z3n, CVC4, UltimateEliminator+MathSAT, Vampire. Markup-only one-line hunks; ranking tables unchanged.]
diff --git a/archive/2020/results/nia-unsat-core.html b/archive/2020/results/nia-unsat-core.html
index 692d7f59..34dec0dd 100644
--- a/archive/2020/results/nia-unsat-core.html
+++ b/archive/2020/results/nia-unsat-core.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the NIA (Unsat Core Track) page; solvers listed: CVC4-uc, z3n, UltimateEliminator+MathSAT]
diff --git a/archive/2020/results/nra-single-query.html b/archive/2020/results/nra-single-query.html
index 34ac8e66..82592583 100644
--- a/archive/2020/results/nra-single-query.html
+++ b/archive/2020/results/nra-single-query.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the NRA (Single Query Track) page; solvers listed: z3n, 2019-Par4n, Vampire, CVC4, UltimateEliminator+MathSAT]
diff --git a/archive/2020/results/qf-abv-incremental.html b/archive/2020/results/qf-abv-incremental.html
index d1724095..e669d5e9 100644
--- a/archive/2020/results/qf-abv-incremental.html
+++ b/archive/2020/results/qf-abv-incremental.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_ABV (Incremental Track) page; solvers listed: Bitwuzla, Bitwuzla-fixedn, Yices2 incremental, Yices2-fixed incrementaln, 2018-Boolector (incremental)n, MathSAT5n, z3n, CVC4-inc]
diff --git a/archive/2020/results/qf-abv-single-query.html b/archive/2020/results/qf-abv-single-query.html
index 5fb0efe7..e00f8782 100644
--- a/archive/2020/results/qf-abv-single-query.html
+++ b/archive/2020/results/qf-abv-single-query.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_ABV (Single Query Track) page; solvers listed: Bitwuzla, Bitwuzla-fixedn, 2019-Boolectorn, 2019-Par4n, Yices2, Yices2-fixedn, MathSAT5n, CVC4, z3n]
diff --git a/archive/2020/results/qf-abv-unsat-core.html b/archive/2020/results/qf-abv-unsat-core.html
index 2e873b16..6ca7b475 100644
--- a/archive/2020/results/qf-abv-unsat-core.html
+++ b/archive/2020/results/qf-abv-unsat-core.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_ABV (Unsat Core Track) page; solvers listed: z3n, Bitwuzla, Bitwuzla-fixedn, Yices2, Yices2-fixedn, CVC4-uc, MathSAT5n]
diff --git a/archive/2020/results/qf-abvfp-incremental.html b/archive/2020/results/qf-abvfp-incremental.html
index dc8d1b5c..451778ee 100644
--- a/archive/2020/results/qf-abvfp-incremental.html
+++ b/archive/2020/results/qf-abvfp-incremental.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_ABVFP (Incremental Track) page; solvers listed: Bitwuzla, Bitwuzla-fixedn, MathSAT5n, CVC4-inc]
diff --git a/archive/2020/results/qf-abvfp-single-query.html b/archive/2020/results/qf-abvfp-single-query.html
index a30c81d3..64f9a7d5 100644
--- a/archive/2020/results/qf-abvfp-single-query.html
+++ b/archive/2020/results/qf-abvfp-single-query.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_ABVFP (Single Query Track) page; solvers listed: Bitwuzla, Bitwuzla-fixedn, MathSAT5n, CVC4, COLIBRI]
diff --git a/archive/2020/results/qf-abvfp-unsat-core.html b/archive/2020/results/qf-abvfp-unsat-core.html
index 7af17f87..b6b95595 100644
--- a/archive/2020/results/qf-abvfp-unsat-core.html
+++ b/archive/2020/results/qf-abvfp-unsat-core.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_ABVFP (Unsat Core Track) page; solvers listed: Bitwuzla, Bitwuzla-fixedn, CVC4-uc, MathSAT5n]
diff --git a/archive/2020/results/qf-abvfplra-single-query.html b/archive/2020/results/qf-abvfplra-single-query.html
index c143f080..e4de6ea6 100644
--- a/archive/2020/results/qf-abvfplra-single-query.html
+++ b/archive/2020/results/qf-abvfplra-single-query.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_ABVFPLRA (Single Query Track) page; solvers listed: CVC4, COLIBRI, MathSAT5n]
diff --git a/archive/2020/results/qf-alia-single-query.html b/archive/2020/results/qf-alia-single-query.html
index e937dc58..a57116b4 100644
--- a/archive/2020/results/qf-alia-single-query.html
+++ b/archive/2020/results/qf-alia-single-query.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_ALIA (Single Query Track) page; solvers listed: 2019-Yices 2.6.2n, Yices2, Yices2-fixedn, MathSAT5n, z3n, SMTInterpol, SMTInterpol-fixedn, CVC4, Alt-Ergo, veriT]
diff --git a/archive/2020/results/qf-alia-unsat-core.html b/archive/2020/results/qf-alia-unsat-core.html
index 6503ba44..2d6e9488 100644
--- a/archive/2020/results/qf-alia-unsat-core.html
+++ b/archive/2020/results/qf-alia-unsat-core.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_ALIA (Unsat Core Track) page; solvers listed: z3n, CVC4-uc, SMTInterpol, SMTInterpol-fixedn, MathSAT5n, Yices2, Yices2-fixedn]
diff --git a/archive/2020/results/qf-ania-single-query.html b/archive/2020/results/qf-ania-single-query.html
index 8cd4fa0c..54818552 100644
--- a/archive/2020/results/qf-ania-single-query.html
+++ b/archive/2020/results/qf-ania-single-query.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_ANIA (Single Query Track) page; solvers listed: 2019-CVC4n, CVC4, MathSAT5n, z3n, Alt-Ergo]
diff --git a/archive/2020/results/qf-aufbv-incremental.html b/archive/2020/results/qf-aufbv-incremental.html
index 7b3af3ee..8b867751 100644
--- a/archive/2020/results/qf-aufbv-incremental.html
+++ b/archive/2020/results/qf-aufbv-incremental.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_AUFBV (Incremental Track) page; solvers listed: Yices2 incremental, Yices2-fixed incrementaln, 2019-Yices 2.6.2 Incrementaln, z3n, MathSAT5n, CVC4-inc, Bitwuzla, Bitwuzla-fixedn]
diff --git a/archive/2020/results/qf-aufbv-single-query.html b/archive/2020/results/qf-aufbv-single-query.html
index 684e7ec9..dbd784f3 100644
--- a/archive/2020/results/qf-aufbv-single-query.html
+++ b/archive/2020/results/qf-aufbv-single-query.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_AUFBV (Single Query Track) page; solvers listed: 2019-Yices 2.6.2n, Yices2, Yices2-fixedn, z3n, MathSAT5n, Bitwuzla, Bitwuzla-fixedn, CVC4]
diff --git a/archive/2020/results/qf-aufbv-unsat-core.html b/archive/2020/results/qf-aufbv-unsat-core.html
index bf8fbc7e..ecc87e91 100644
--- a/archive/2020/results/qf-aufbv-unsat-core.html
+++ b/archive/2020/results/qf-aufbv-unsat-core.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_AUFBV (Unsat Core Track) page; solvers listed: Yices2, Yices2-fixedn, z3n, Bitwuzla, Bitwuzla-fixedn, CVC4-uc, MathSAT5n]
diff --git a/archive/2020/results/qf-auflia-incremental.html b/archive/2020/results/qf-auflia-incremental.html
index 0ce8f93d..e07496a6 100644
--- a/archive/2020/results/qf-auflia-incremental.html
+++ b/archive/2020/results/qf-auflia-incremental.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_AUFLIA (Incremental Track) page; solvers listed: 2018-Yices (incremental)n, Yices2 incremental, Yices2-fixed incrementaln, z3n, SMTInterpol, SMTInterpol-fixedn, MathSAT5n, CVC4-inc]
diff --git a/archive/2020/results/qf-auflia-single-query.html b/archive/2020/results/qf-auflia-single-query.html
index 53522a3b..aa304207 100644
--- a/archive/2020/results/qf-auflia-single-query.html
+++ b/archive/2020/results/qf-auflia-single-query.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_AUFLIA (Single Query Track) page; solvers listed: 2019-Yices 2.6.2n, Yices2, Yices2-fixedn, z3n, MathSAT5n, CVC4, SMTInterpol, SMTInterpol-fixedn, Alt-Ergo, veriT]
diff --git a/archive/2020/results/qf-auflia-unsat-core.html b/archive/2020/results/qf-auflia-unsat-core.html
index 1e5a40ec..7c103cc8 100644
--- a/archive/2020/results/qf-auflia-unsat-core.html
+++ b/archive/2020/results/qf-auflia-unsat-core.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_AUFLIA (Unsat Core Track) page; solvers listed: CVC4-uc, z3n, Yices2, Yices2-fixedn, SMTInterpol, SMTInterpol-fixedn, MathSAT5n]
diff --git a/archive/2020/results/qf-aufnia-single-query.html b/archive/2020/results/qf-aufnia-single-query.html
index 03a4985c..2c4bd118 100644
--- a/archive/2020/results/qf-aufnia-single-query.html
+++ b/archive/2020/results/qf-aufnia-single-query.html
[hunks elided: repeated single-line markup fixes to the page header, navigation, and per-solver result tables of the QF_AUFNIA (Single Query Track) page; solvers listed: 2019-MathSAT-defaultn, MathSAT5n, z3n, CVC4, Alt-Ergo]
    - + - diff --git a/archive/2020/results/qf-ax-single-query.html b/archive/2020/results/qf-ax-single-query.html index 8b667459..bc7dd802 100644 --- a/archive/2020/results/qf-ax-single-query.html +++ b/archive/2020/results/qf-ax-single-query.html @@ -35,7 +35,7 @@

[QF_AX (Single Query Track) results page: markup-only diff hunks. Best performer: Yices2 (all five performance categories). Participants: Yices2-fixedn, Yices2, 2019-Yices 2.6.2n, 2018-Yicesn, MathSAT5n, z3n, CVC4, SMTInterpol, SMTInterpol-fixedn, Alt-Ergo.]

diff --git a/archive/2020/results/qf-ax-unsat-core.html b/archive/2020/results/qf-ax-unsat-core.html
index de69e52a..a3e01233 100644
--- a/archive/2020/results/qf-ax-unsat-core.html
+++ b/archive/2020/results/qf-ax-unsat-core.html

[QF_AX (Unsat Core Track) results page: markup-only diff hunks. Best performer: Yices2 (sequential and parallel). Participants: Yices2-fixedn, Yices2, z3n, CVC4-uc, MathSAT5n, SMTInterpol, SMTInterpol-fixedn.]

diff --git a/archive/2020/results/qf-bv-incremental.html b/archive/2020/results/qf-bv-incremental.html
index 33a42093..9a2db748 100644
--- a/archive/2020/results/qf-bv-incremental.html
+++ b/archive/2020/results/qf-bv-incremental.html

[QF_BV (Incremental Track) results page: markup-only diff hunks. Best performer: Yices2 incremental (parallel performance). Participants: Yices2-fixed incrementaln, Yices2 incremental, 2019-Yices 2.6.2 Incrementaln, STP + CMS, Bitwuzla-fixedn, Bitwuzla, STP + MergeSAT, z3n, MathSAT5n, CVC4-inc, LazyBV2Int.]

diff --git a/archive/2020/results/qf-bv-model-validation.html b/archive/2020/results/qf-bv-model-validation.html
index ea249582..88338623 100644
--- a/archive/2020/results/qf-bv-model-validation.html
+++ b/archive/2020/results/qf-bv-model-validation.html

[QF_BV (Model Validation Track) results page: markup-only diff hunks. Best performer: Bitwuzla (sequential and parallel). Participants: Bitwuzla, Bitwuzla-fixedn, Yices2-fixed Model Validationn, 2019-Boolectorn, CVC4-mv, STP + MergeSAT, STP + CMS, z3n (1 error), Yices2 Model Validation (43 errors), MathSAT5-mvn (104* errors).]

diff --git a/archive/2020/results/qf-bv-single-query.html b/archive/2020/results/qf-bv-single-query.html
index 6f338bf6..257ea75f 100644
--- a/archive/2020/results/qf-bv-single-query.html
+++ b/archive/2020/results/qf-bv-single-query.html

[QF_BV (Single Query Track) results page: markup-only diff hunks. Best performer: Bitwuzla (all five performance categories). Participants: Bitwuzla-fixedn, Bitwuzla, Yices2-fixedn, 2019-Boolectorn, 2019-Poolectorn, MinkeyRink-fixedn, STP + CMS, CVC4, MathSAT5n, z3n, LazyBV2Int, MinkeyRink, STP + MergeSAT (2 errors), Yices2 (32 errors).]

diff --git a/archive/2020/results/qf-bv-unsat-core.html b/archive/2020/results/qf-bv-unsat-core.html
index 185604f4..a9d1b792 100644
--- a/archive/2020/results/qf-bv-unsat-core.html
+++ b/archive/2020/results/qf-bv-unsat-core.html

[QF_BV (Unsat Core Track) results page: markup-only diff hunks. Best performer: Bitwuzla (sequential and parallel). Participants: Bitwuzla-fixedn, Bitwuzla, Yices2-fixedn, Yices2, CVC4-uc, z3n, MathSAT5n.]

diff --git a/archive/2020/results/qf-bvfp-incremental.html b/archive/2020/results/qf-bvfp-incremental.html
index 047fe2d9..3c72223f 100644
--- a/archive/2020/results/qf-bvfp-incremental.html
+++ b/archive/2020/results/qf-bvfp-incremental.html

[QF_BVFP (Incremental Track) results page: markup-only diff hunks. Best performer: Bitwuzla (parallel performance). Participants: Bitwuzla-fixedn, Bitwuzla, MathSAT5n, CVC4-inc, z3n.]

diff --git a/archive/2020/results/qf-bvfp-single-query.html b/archive/2020/results/qf-bvfp-single-query.html
index 13f3aaaa..c9ceac3d 100644
--- a/archive/2020/results/qf-bvfp-single-query.html
+++ b/archive/2020/results/qf-bvfp-single-query.html

[QF_BVFP (Single Query Track) results page: markup-only diff hunks. Best performer: Bitwuzla (all five performance categories). Participants: Bitwuzla-fixedn, Bitwuzla, CVC4, 2019-Par4n, MathSAT5n, z3n, COLIBRI.]

diff --git a/archive/2020/results/qf-bvfp-unsat-core.html b/archive/2020/results/qf-bvfp-unsat-core.html
index ad0f2ba2..0d2f20b9 100644
--- a/archive/2020/results/qf-bvfp-unsat-core.html
+++ b/archive/2020/results/qf-bvfp-unsat-core.html

[QF_BVFP (Unsat Core Track) results page: markup-only diff hunks. Best performer: Bitwuzla (sequential and parallel). Participants: Bitwuzla, Bitwuzla-fixedn, z3n, CVC4-uc, MathSAT5n.]

diff --git a/archive/2020/results/qf-bvfplra-single-query.html b/archive/2020/results/qf-bvfplra-single-query.html
index dd76f680..bad00e4b 100644
--- a/archive/2020/results/qf-bvfplra-single-query.html
+++ b/archive/2020/results/qf-bvfplra-single-query.html

[QF_BVFPLRA (Single Query Track) results page: markup-only diff hunks. Best performers: CVC4 (sequential, parallel, SAT, 24s), COLIBRI (UNSAT). Participants: MathSAT5n, CVC4, COLIBRI.]

diff --git a/archive/2020/results/qf-dt-single-query.html b/archive/2020/results/qf-dt-single-query.html
index 38579b61..e881845c 100644
--- a/archive/2020/results/qf-dt-single-query.html
+++ b/archive/2020/results/qf-dt-single-query.html

[QF_DT (Single Query Track) results page: markup-only diff hunks. Best performer: CVC4 (all five performance categories). Participants: z3n, 2018-CVC4n, CVC4, Alt-Ergo.]

diff --git a/archive/2020/results/qf-fp-incremental.html b/archive/2020/results/qf-fp-incremental.html
index f356af49..fce02b1b 100644
--- a/archive/2020/results/qf-fp-incremental.html
+++ b/archive/2020/results/qf-fp-incremental.html

[QF_FP (Incremental Track) results page: markup-only diff hunks. Best performer: CVC4-inc (parallel performance). Participants: CVC4-inc, Bitwuzla, Bitwuzla-fixedn, MathSAT5n, z3n.]

diff --git a/archive/2020/results/qf-fp-single-query.html b/archive/2020/results/qf-fp-single-query.html
index 39661fff..1fdcf06b 100644
--- a/archive/2020/results/qf-fp-single-query.html
+++ b/archive/2020/results/qf-fp-single-query.html

[QF_FP (Single Query Track) results page: markup-only diff hunks. Best performers: Bitwuzla (sequential, parallel, SAT, UNSAT), COLIBRI (24s). Participants: Bitwuzla-fixedn, Bitwuzla, 2019-Par4n, COLIBRI, MathSAT5n, CVC4, z3n.]

diff --git a/archive/2020/results/qf-fp-unsat-core.html b/archive/2020/results/qf-fp-unsat-core.html
index 6931f831..8c71a425 100644
--- a/archive/2020/results/qf-fp-unsat-core.html
+++ b/archive/2020/results/qf-fp-unsat-core.html

[QF_FP (Unsat Core Track) results page: markup-only diff hunks. Best performer: Bitwuzla (sequential and parallel). Participants: Bitwuzla-fixedn, Bitwuzla, CVC4-uc, z3n, MathSAT5n.]

diff --git a/archive/2020/results/qf-fplra-single-query.html b/archive/2020/results/qf-fplra-single-query.html
index 8320a893..62b1aeea 100644
--- a/archive/2020/results/qf-fplra-single-query.html
+++ b/archive/2020/results/qf-fplra-single-query.html

[QF_FPLRA (Single Query Track) results page: markup-only diff hunks. Best performer: COLIBRI (all five performance categories). Participants: COLIBRI, MathSAT5n, z3n, CVC4.]

diff --git a/archive/2020/results/qf-idl-model-validation.html b/archive/2020/results/qf-idl-model-validation.html
index 81fa0774..d5065510 100644
--- a/archive/2020/results/qf-idl-model-validation.html
+++ b/archive/2020/results/qf-idl-model-validation.html

[QF_IDL (Model Validation Track) results page: markup-only diff hunks. Participants: Yices2 Model Validation, Yices2-fixed Model Validationn, z3n, CVC4-mv, SMTInterpol-fixedn, SMTInterpol, MathSAT5-mvn (5* errors).]

diff --git a/archive/2020/results/qf-idl-single-query.html b/archive/2020/results/qf-idl-single-query.html
index 10aed2c8..46270bb3 100644
--- a/archive/2020/results/qf-idl-single-query.html
+++ b/archive/2020/results/qf-idl-single-query.html

[QF_IDL (Single Query Track) results page: markup-only diff hunks. Best performers: Yices2 (sequential, parallel, SAT, 24s), CVC4 (UNSAT). Participants: Yices2, Yices2-fixedn, 2019-Z3n, 2019-Par4n, z3n, CVC4, veriT, MathSAT5n, SMTInterpol, SMTInterpol-fixedn.]

diff --git a/archive/2020/results/qf-lia-incremental.html b/archive/2020/results/qf-lia-incremental.html
index ec816356..a3dab4e2 100644
--- a/archive/2020/results/qf-lia-incremental.html
+++ b/archive/2020/results/qf-lia-incremental.html

[QF_LIA (Incremental Track) results page: markup-only diff hunks. Best performer: Yices2 incremental (parallel performance). Participants: Yices2-fixed incrementaln, 2018-Yices (incremental)n, Yices2 incremental, SMTInterpol, SMTInterpol-fixedn, MathSAT5n, CVC4-inc, z3n.]

diff --git a/archive/2020/results/qf-lia-model-validation.html b/archive/2020/results/qf-lia-model-validation.html
index f4d4c453..659f6c2b 100644
--- a/archive/2020/results/qf-lia-model-validation.html
+++ b/archive/2020/results/qf-lia-model-validation.html

[QF_LIA (Model Validation Track) results page: markup-only diff hunks. Participants: z3n, CVC4-mv, Yices2 Model Validation, Yices2-fixed Model Validationn, SMTInterpol-fixedn, SMTInterpol, MathSAT5-mvn (14* errors).]

diff --git a/archive/2020/results/qf-lia-single-query.html b/archive/2020/results/qf-lia-single-query.html
index d42f25a0..4a16f693 100644
--- a/archive/2020/results/qf-lia-single-query.html
+++ b/archive/2020/results/qf-lia-single-query.html

    [Flattened results page: QF_LIA division, Single Query Track. Header winner boxes, listed in the order sequential, parallel, SAT, UNSAT, 24s performance: CVC4, CVC4, CVC4, SMTInterpol, Yices2. Entrants in ranked order with problems solved: 2019-Par4 (n) 2447, MathSAT5 (n) 2329, CVC4 2290, z3 (n) 2233, Yices2 2198, Yices2-fixed (n) 2197, SMTInterpol 2170, SMTInterpol-fixed (n) 2168, veriT 819.]

diff --git a/archive/2020/results/qf-lia-unsat-core.html b/archive/2020/results/qf-lia-unsat-core.html
index 8cb5daca..4b430b88 100644
--- a/archive/2020/results/qf-lia-unsat-core.html
+++ b/archive/2020/results/qf-lia-unsat-core.html

    [Flattened results page: QF_LIA division, Unsat Core Track. Sequential- and parallel-performance winner: Yices2. Entrants in ranked order with reduction scores: Yices2-fixed (n) 945496, Yices2 945496, z3 (n) 925924, MathSAT5 (n) 924387, CVC4-uc 827760, SMTInterpol 811054, SMTInterpol-fixed (n) 811054.]

diff --git a/archive/2020/results/qf-lira-model-validation.html b/archive/2020/results/qf-lira-model-validation.html
index 47303394..c5795ec7 100644
--- a/archive/2020/results/qf-lira-model-validation.html
+++ b/archive/2020/results/qf-lira-model-validation.html

    [Flattened results page: QF_LIRA division, Model Validation Track. Every entrant validated the division's single benchmark; ranked by runtime: Yices2-fixed Model Validation (n) and Yices2 Model Validation (0.098 s), z3 (n) (0.343 s), CVC4-mv (1.357 s), MathSAT5-mv (n) (2.52 s), SMTInterpol (47.6 s), SMTInterpol-fixed (n) (49.0 s).]

diff --git a/archive/2020/results/qf-lira-single-query.html b/archive/2020/results/qf-lira-single-query.html
index 05ce2e56..28f121b7 100644
--- a/archive/2020/results/qf-lira-single-query.html
+++ b/archive/2020/results/qf-lira-single-query.html

    [Flattened results page: QF_LIRA division, Single Query Track. Winner in all five performance categories: Yices2. Entrants in ranked order with problems solved: 2019-Par4 (n) 7, z3 (n) 6, Yices2 6, Yices2-fixed (n) 6, MathSAT5 (n) 5, CVC4 5, SMTInterpol-fixed (n) 4, SMTInterpol 4.]

diff --git a/archive/2020/results/qf-lira-unsat-core.html b/archive/2020/results/qf-lira-unsat-core.html
index f29e2c6a..d09a4292 100644
--- a/archive/2020/results/qf-lira-unsat-core.html
+++ b/archive/2020/results/qf-lira-unsat-core.html

    [Flattened results page: QF_LIRA division, Unsat Core Track. No winner in either category; all seven entrants (z3 (n), Yices2-fixed (n), Yices2, CVC4-uc, MathSAT5 (n), SMTInterpol, SMTInterpol-fixed (n)) scored 0, so the list is ranked by runtime only.]

diff --git a/archive/2020/results/qf-lra-incremental.html b/archive/2020/results/qf-lra-incremental.html
index a485072e..31aede8c 100644
--- a/archive/2020/results/qf-lra-incremental.html
+++ b/archive/2020/results/qf-lra-incremental.html

    [Flattened results page: QF_LRA division, Incremental Track. Parallel-performance winner: Yices2 incremental. Entrants in ranked order: 2018-MathSAT (incremental) (n), MathSAT5 (n), Yices2-fixed incremental (n), Yices2 incremental, OpenSMT, SMTInterpol, SMTInterpol-fixed (n), CVC4-inc, z3 (n).]

diff --git a/archive/2020/results/qf-lra-model-validation.html b/archive/2020/results/qf-lra-model-validation.html
index c8989467..c2727deb 100644
--- a/archive/2020/results/qf-lra-model-validation.html
+++ b/archive/2020/results/qf-lra-model-validation.html

    [Flattened results page: QF_LRA division, Model Validation Track. Entrants in ranked order with models validated: OpenSMT 366, Yices2 Model Validation 361, Yices2-fixed Model Validation (n) 361, z3 (n) 361, CVC4-mv 360, SMTInterpol-fixed (n) 351, SMTInterpol 350 (error score 2*), MathSAT5-mv (n) 345 (error score 2*).]

diff --git a/archive/2020/results/qf-lra-single-query.html b/archive/2020/results/qf-lra-single-query.html
index 6cc0a3f0..9847e997 100644
--- a/archive/2020/results/qf-lra-single-query.html
+++ b/archive/2020/results/qf-lra-single-query.html

    [Flattened results page: QF_LRA division, Single Query Track. Winners: OpenSMT in the sequential, parallel, SAT, and UNSAT categories; Yices2 in the 24s category. Entrants in ranked order with problems solved: OpenSMT 406, 2019-SPASS-SATT (n) 405, 2019-Par4 (n) 396, CVC4 391, Yices2 389, Yices2-fixed (n) 389, z3 (n) 385, veriT 377, SMTInterpol-fixed (n) 358, SMTInterpol 354, MathSAT5 (n) 353.]

diff --git a/archive/2020/results/qf-lra-unsat-core.html b/archive/2020/results/qf-lra-unsat-core.html
index 9fc88ee2..05919123 100644
--- a/archive/2020/results/qf-lra-unsat-core.html
+++ b/archive/2020/results/qf-lra-unsat-core.html

    [Flattened results page: QF_LRA division, Unsat Core Track. Sequential- and parallel-performance winner: Yices2. Entrants in ranked order with reduction scores: Yices2 146931, Yices2-fixed (n) 146931, CVC4-uc 143126, z3 (n) 121290, MathSAT5 (n) 112489, SMTInterpol-fixed (n) 95849, SMTInterpol 88745.]

diff --git a/archive/2020/results/qf-nia-incremental.html b/archive/2020/results/qf-nia-incremental.html
index 9559d525..30801a49 100644
--- a/archive/2020/results/qf-nia-incremental.html
+++ b/archive/2020/results/qf-nia-incremental.html

    [Flattened results page: QF_NIA division, Incremental Track. Parallel-performance winner: CVC4-inc. Entrants in ranked order: MathSAT5 (n), 2019-MathSAT-default (n), CVC4-inc, z3 (n), Yices2-fixed incremental (n), Yices2 incremental (error score 4).]

diff --git a/archive/2020/results/qf-nia-single-query.html b/archive/2020/results/qf-nia-single-query.html
index 974db553..72ba91fe 100644
--- a/archive/2020/results/qf-nia-single-query.html
+++ b/archive/2020/results/qf-nia-single-query.html

    [Flattened results page: QF_NIA division, Single Query Track. Winners: CVC4 in the sequential, parallel, and SAT categories; Yices2 in the UNSAT and 24s categories. Entrants in ranked order with problems solved: 2019-Par4 (n) 7009, CVC4 6345, MathSAT5 (n) 6330, Yices2 6074, Yices2-fixed (n) 6073, z3 (n) 5955, AProVE 3184, SMT-RAT 1996.]

diff --git a/archive/2020/results/qf-nira-single-query.html b/archive/2020/results/qf-nira-single-query.html
index e2a7865b..e47a25c6 100644
--- a/archive/2020/results/qf-nira-single-query.html
+++ b/archive/2020/results/qf-nira-single-query.html

    [Flattened results page: QF_NIRA division, Single Query Track. Winners: SMT-RAT in the sequential, parallel, and UNSAT categories; no winner in the SAT and 24s categories (no solver found a satisfiable result, and only the non-competing z3 solved anything within 24 seconds). Entrants in ranked order with problems solved: SMT-RAT 2, 2018-SMTRAT-Rat (n) 2, z3 (n) 1, CVC4 1, MathSAT5 (n) 1, Yices2 0, Yices2-fixed (n) 0.]

diff --git a/archive/2020/results/qf-nra-single-query.html b/archive/2020/results/qf-nra-single-query.html
index a7dc812f..3b92a3a7 100644
--- a/archive/2020/results/qf-nra-single-query.html
+++ b/archive/2020/results/qf-nra-single-query.html

    [Flattened results page: QF_NRA division, Single Query Track. Winner in all five performance categories: Yices2. Entrants in ranked order with problems solved: 2019-Par4 (n) 1998, Yices2-fixed (n) 1770, Yices2 1770, z3 (n) 1658, SMT-RAT-MCSAT 1564, veriT+raSAT+Redlog 1420, SMT-RAT-CAlC 1377, CVC4 1325, MathSAT5 (n) 1292.]

diff --git a/archive/2020/results/qf-rdl-model-validation.html b/archive/2020/results/qf-rdl-model-validation.html
index 483a9199..ab55062e 100644
--- a/archive/2020/results/qf-rdl-model-validation.html
+++ b/archive/2020/results/qf-rdl-model-validation.html

    [Flattened results page: QF_RDL division, Model Validation Track. Entrants in ranked order with models validated: Yices2-fixed Model Validation (n) 109, Yices2 Model Validation 109, CVC4-mv 106, MathSAT5-mvn (n) 106, z3 (n) 103, SMTInterpol 101, SMTInterpol-fixed (n) 101.]

diff --git a/archive/2020/results/qf-rdl-single-query.html b/archive/2020/results/qf-rdl-single-query.html
index cdfb0159..72622717 100644
--- a/archive/2020/results/qf-rdl-single-query.html
+++ b/archive/2020/results/qf-rdl-single-query.html

    [Flattened results page: QF_RDL division, Single Query Track. Winner in all five performance categories: Yices2. Entrants in ranked order with problems solved: 2019-Yices 2.6.2 (n) 213, Yices2 213, Yices2-fixed (n) 213, CVC4 209, z3 (n) 207, veriT 206, MathSAT5 (n) 198, SMTInterpol 187, SMTInterpol-fixed (n) 187.]

diff --git a/archive/2020/results/qf-s-single-query.html b/archive/2020/results/qf-s-single-query.html
index 21941b2d..5e98e893 100644
--- a/archive/2020/results/qf-s-single-query.html
+++ b/archive/2020/results/qf-s-single-query.html

    [Flattened results page: QF_S division, Single Query Track. Winner in all five performance categories: CVC4. Two entrants, ranked with problems solved: CVC4 922, Z3str4 862.]

diff --git a/archive/2020/results/qf-slia-single-query.html b/archive/2020/results/qf-slia-single-query.html
index 4aa5943e..13daa336 100644
--- a/archive/2020/results/qf-slia-single-query.html
+++ b/archive/2020/results/qf-slia-single-query.html

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2020 Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    QF_SLIA (Single Query Track)

    Competition results for the QF_SLIA - + division - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    QF_SLIA (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) CVC4CVC4CVC4 - - + + CVC4 - - + + CVC4 - + @@ -131,7 +131,7 @@

    QF_SLIA (Single Query Track)

    - + CVC4 0 26696 @@ -142,7 +142,7 @@

    QF_SLIA (Single Query Track)

    - + Z3str4 0 23362 @@ -164,7 +164,7 @@

    QF_SLIA (Single Query Track)

    - + CVC4 0 26696409225.784410833.113266961654010156234233 @@ -173,7 +173,7 @@

    QF_SLIA (Single Query Track)

    - + Z3str4 0 233624216352.4884214720.32523362133381002435683501 @@ -193,7 +193,7 @@

    QF_SLIA (Single Query Track)

    - + CVC4 0 16540139125.614140490.5971654016540010390233 @@ -202,7 +202,7 @@

    QF_SLIA (Single Query Track)

    - + Z3str4 0 133383868493.0193867185.06413338133380135923501 @@ -222,7 +222,7 @@

    QF_SLIA (Single Query Track)

    - + CVC4 0 1015627694.38727936.7331015601015616774233 @@ -231,7 +231,7 @@

    QF_SLIA (Single Query Track)

    - + Z3str4 0 10024119698.239119435.61810024010024169063501 @@ -251,7 +251,7 @@

    QF_SLIA (Single Query Track)

    - + CVC4 0 2616528442.19428360.891261651610010065765764 @@ -260,7 +260,7 @@

    QF_SLIA (Single Query Track)

    - + Z3str4 0 2332390164.63788562.58423323132991002436073541 @@ -284,7 +284,6 @@

    QF_SLIA (Single Query Track)

diff --git a/archive/2020/results/qf-uf-incremental.html b/archive/2020/results/qf-uf-incremental.html
index 89325225..4f0cfcd6 100644
--- a/archive/2020/results/qf-uf-incremental.html
+++ b/archive/2020/results/qf-uf-incremental.html
[One-line hunks across the QF_UF (Incremental Track) results page: page header, navigation, division description, winner summary (Parallel Performance: Yices2 incremental), and one hunk per result row (2019-Yices 2.6.2 Incremental, Yices2-fixed incremental, Yices2 incremental, z3, CVC4-inc, SMTInterpol, SMTInterpol-fixed, OpenSMT, MathSAT5); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-uf-single-query.html b/archive/2020/results/qf-uf-single-query.html
index d53167d1..8a9fe3f6 100644
--- a/archive/2020/results/qf-uf-single-query.html
+++ b/archive/2020/results/qf-uf-single-query.html
[One-line hunks across the QF_UF (Single Query Track) results page: page header, navigation, division description, winner summary (Yices2 in all five performance categories), and one hunk per result row (Yices2, Yices2-fixed, 2019-Yices 2.6.2, 2019-Par4, veriT, z3, OpenSMT, CVC4, SMTInterpol, SMTInterpol-fixed, MathSAT5, Alt-Ergo); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-uf-unsat-core.html b/archive/2020/results/qf-uf-unsat-core.html
index 6fa0e688..85658b03 100644
--- a/archive/2020/results/qf-uf-unsat-core.html
+++ b/archive/2020/results/qf-uf-unsat-core.html
[One-line hunks across the QF_UF (Unsat Core Track) results page: page header, navigation, division description, winner summary (SMTInterpol in both Sequential and Parallel Performance), and one hunk per result row (SMTInterpol, SMTInterpol-fixed, z3, Yices2, Yices2-fixed, CVC4-uc, MathSAT5 with error score 8*); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-ufbv-incremental.html b/archive/2020/results/qf-ufbv-incremental.html
index dadd1c48..3309e53b 100644
--- a/archive/2020/results/qf-ufbv-incremental.html
+++ b/archive/2020/results/qf-ufbv-incremental.html
[One-line hunks across the QF_UFBV (Incremental Track) results page: page header, navigation, division description, winner summary (Parallel Performance: Bitwuzla), and one hunk per result row (Bitwuzla, Bitwuzla-fixed, 2018-Boolector (incremental), Yices2 incremental, Yices2-fixed incremental, z3, MathSAT5, CVC4-inc); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-ufbv-single-query.html b/archive/2020/results/qf-ufbv-single-query.html
index 9eaf5201..9e1937cd 100644
--- a/archive/2020/results/qf-ufbv-single-query.html
+++ b/archive/2020/results/qf-ufbv-single-query.html
[One-line hunks across the QF_UFBV (Single Query Track) results page: page header, navigation, division description, winner summary (Yices2 in Sequential, Parallel, SAT, and UNSAT Performance; Bitwuzla in 24s Performance), and one hunk per result row (Yices2, Yices2-fixed, 2019-Yices 2.6.2, Bitwuzla, Bitwuzla-fixed, MathSAT5, CVC4, z3); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-ufbv-unsat-core.html b/archive/2020/results/qf-ufbv-unsat-core.html
index c340cb17..a819ee08 100644
--- a/archive/2020/results/qf-ufbv-unsat-core.html
+++ b/archive/2020/results/qf-ufbv-unsat-core.html
[One-line hunks across the QF_UFBV (Unsat Core Track) results page: page header, navigation, division description, winner summary (Yices2 in both Sequential and Parallel Performance), and one hunk per result row (z3, Yices2, Yices2-fixed, Bitwuzla, Bitwuzla-fixed, CVC4-uc, MathSAT5); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-uffp-incremental.html b/archive/2020/results/qf-uffp-incremental.html
index 37a2dd2e..1a6b4e51 100644
--- a/archive/2020/results/qf-uffp-incremental.html
+++ b/archive/2020/results/qf-uffp-incremental.html
[One-line hunks across the QF_UFFP (Incremental Track) results page: page header, navigation, division description, winner summary (Parallel Performance: Bitwuzla), and one hunk per result row (Bitwuzla, Bitwuzla-fixed, CVC4-inc, MathSAT5); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-uffp-single-query.html b/archive/2020/results/qf-uffp-single-query.html
index 6600dbf4..fd7f06aa 100644
--- a/archive/2020/results/qf-uffp-single-query.html
+++ b/archive/2020/results/qf-uffp-single-query.html
[One-line hunks across the QF_UFFP (Single Query Track) results page: page header, navigation, division description, winner summary (Bitwuzla in Sequential, Parallel, UNSAT, and 24s Performance; no SAT Performance winner declared), and one hunk per result row (Bitwuzla, Bitwuzla-fixed, COLIBRI, CVC4, MathSAT5); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-uffp-unsat-core.html b/archive/2020/results/qf-uffp-unsat-core.html
index c0b4d5fd..9795e303 100644
--- a/archive/2020/results/qf-uffp-unsat-core.html
+++ b/archive/2020/results/qf-uffp-unsat-core.html
[One-line hunks across the QF_UFFP (Unsat Core Track) results page: page header, navigation, division description, winner summary (no winner declared in either Sequential or Parallel Performance), and one hunk per result row (MathSAT5, Bitwuzla, Bitwuzla-fixed, CVC4-uc); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-ufidl-single-query.html b/archive/2020/results/qf-ufidl-single-query.html
index 3d32fdec..daeedbb2 100644
--- a/archive/2020/results/qf-ufidl-single-query.html
+++ b/archive/2020/results/qf-ufidl-single-query.html
[One-line hunks across the QF_UFIDL (Single Query Track) results page: page header, navigation, division description, winner summary (Yices2 in all five performance categories), and one hunk per result row (2019-Yices 2.6.2, Yices2, Yices2-fixed, z3, SMTInterpol, SMTInterpol-fixed, MathSAT5, veriT, CVC4); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-ufidl-unsat-core.html b/archive/2020/results/qf-ufidl-unsat-core.html
index 8896f81f..b4d55359 100644
--- a/archive/2020/results/qf-ufidl-unsat-core.html
+++ b/archive/2020/results/qf-ufidl-unsat-core.html
[One-line hunks across the QF_UFIDL (Unsat Core Track) results page: page header, navigation, division description, winner summary (Yices2 in both Sequential and Parallel Performance), and one hunk per result row (Yices2, Yices2-fixed, CVC4-uc, MathSAT5, z3, SMTInterpol, SMTInterpol-fixed); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-uflia-incremental.html b/archive/2020/results/qf-uflia-incremental.html
index 6a1544c0..aed2d6b1 100644
--- a/archive/2020/results/qf-uflia-incremental.html
+++ b/archive/2020/results/qf-uflia-incremental.html
[One-line hunks across the QF_UFLIA (Incremental Track) results page: page header, navigation, division description, winner summary (Parallel Performance: Yices2 incremental), and one hunk per result row (2018-Z3 (incremental), z3, MathSAT5, Yices2-fixed incremental, Yices2 incremental, SMTInterpol, SMTInterpol-fixed, CVC4-inc); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-uflia-single-query.html b/archive/2020/results/qf-uflia-single-query.html
index 48fa67b8..88a8fba4 100644
--- a/archive/2020/results/qf-uflia-single-query.html
+++ b/archive/2020/results/qf-uflia-single-query.html
[One-line hunks across the QF_UFLIA (Single Query Track) results page: page header, navigation, division description, winner summary (Yices2 in all five performance categories), and one hunk per result row (2018-Yices, Yices2, Yices2-fixed, z3, CVC4, SMTInterpol, SMTInterpol-fixed, MathSAT5, veriT, Alt-Ergo); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-uflia-unsat-core.html b/archive/2020/results/qf-uflia-unsat-core.html
index a7495650..19d915b0 100644
--- a/archive/2020/results/qf-uflia-unsat-core.html
+++ b/archive/2020/results/qf-uflia-unsat-core.html
[One-line hunks across the QF_UFLIA (Unsat Core Track) results page: page header, navigation, division description, winner summary (Yices2 in both Sequential and Parallel Performance), and one hunk per result row (Yices2, Yices2-fixed, MathSAT5, z3, SMTInterpol, SMTInterpol-fixed, CVC4-uc); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-uflra-incremental.html b/archive/2020/results/qf-uflra-incremental.html
index aa2e246b..5f34ecd4 100644
--- a/archive/2020/results/qf-uflra-incremental.html
+++ b/archive/2020/results/qf-uflra-incremental.html
[One-line hunks across the QF_UFLRA (Incremental Track) results page: page header, navigation, division description, winner summary (Parallel Performance: Yices2 incremental), and one hunk per result row (z3, 2019-Z3, MathSAT5, Yices2 incremental, Yices2-fixed incremental, SMTInterpol, SMTInterpol-fixed, CVC4-inc); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-uflra-single-query.html b/archive/2020/results/qf-uflra-single-query.html
index 94ba2549..dbc6b4a0 100644
--- a/archive/2020/results/qf-uflra-single-query.html
+++ b/archive/2020/results/qf-uflra-single-query.html
[One-line hunks across the QF_UFLRA (Single Query Track) results page: page header, navigation, division description, winner summary (Yices2 in all five performance categories), and one hunk per result row (2019-SMTInterpol, Yices2, Yices2-fixed, veriT, MathSAT5, CVC4, SMTInterpol, SMTInterpol-fixed, z3, Alt-Ergo); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-uflra-unsat-core.html b/archive/2020/results/qf-uflra-unsat-core.html
index ce6c9c56..764c4d48 100644
--- a/archive/2020/results/qf-uflra-unsat-core.html
+++ b/archive/2020/results/qf-uflra-unsat-core.html
[One-line hunks across the QF_UFLRA (Unsat Core Track) results page: page header, navigation, division description, winner summary (CVC4-uc in both Sequential and Parallel Performance), and one hunk per result row (z3, MathSAT5, CVC4-uc, Yices2, Yices2-fixed, SMTInterpol, SMTInterpol-fixed); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-ufnia-incremental.html b/archive/2020/results/qf-ufnia-incremental.html
index 56f166c8..acca7927 100644
--- a/archive/2020/results/qf-ufnia-incremental.html
+++ b/archive/2020/results/qf-ufnia-incremental.html
[One-line hunks across the QF_UFNIA (Incremental Track) results page: page header, navigation, division description, winner summary (Parallel Performance: CVC4-inc), and one hunk per result row (MathSAT5, 2019-MathSAT-default, z3, CVC4-inc, Yices2 incremental, Yices2-fixed incremental); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-ufnia-single-query.html b/archive/2020/results/qf-ufnia-single-query.html
index 33a86609..17fc60b3 100644
--- a/archive/2020/results/qf-ufnia-single-query.html
+++ b/archive/2020/results/qf-ufnia-single-query.html
[One-line hunks across the QF_UFNIA (Single Query Track) results page: page header, navigation, division description, winner summary (CVC4 in Sequential, Parallel, UNSAT, and 24s Performance; Yices2 in SAT Performance), and one hunk per result row (CVC4, Yices2, Yices2-fixed, 2019-CVC4, MathSAT5, z3, Alt-Ergo); hunk bodies not recoverable.]
diff --git a/archive/2020/results/qf-ufnra-single-query.html b/archive/2020/results/qf-ufnra-single-query.html
index 9a285a55..893ac4e3 100644
--- a/archive/2020/results/qf-ufnra-single-query.html
+++ b/archive/2020/results/qf-ufnra-single-query.html
[Markup-only hunks: banner, navigation, and the QF_UFNRA (Single Query Track) result tables. Winner in all five categories: Yices2; fused score rows for 2019-Par4n, Yices2, Yices2-fixedn, z3n, CVC4, MathSAT5n, veriT+raSAT+Redlog, and Alt-Ergo. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/results-incremental.html b/archive/2020/results/results-incremental.html
index 77064cb3..bda998e8 100644
--- a/archive/2020/results/results-incremental.html
+++ b/archive/2020/results/results-incremental.html
[Markup-only hunks: banner, navigation, and the closing lines of "SMT-COMP 2020 Results - Incremental Track (Summary)". Final hunk removes one trailing line.]
diff --git a/archive/2020/results/results-model-validation.html b/archive/2020/results/results-model-validation.html
index cd922378..7bea2425 100644
--- a/archive/2020/results/results-model-validation.html
+++ b/archive/2020/results/results-model-validation.html
[Markup-only hunks: banner, navigation, and the closing lines of "SMT-COMP 2020 Results - Model Validation Track (Summary)". Final hunk removes one trailing line.]
diff --git a/archive/2020/results/results-single-query.html b/archive/2020/results/results-single-query.html
index 12310d1f..32e5f15b 100644
--- a/archive/2020/results/results-single-query.html
+++ b/archive/2020/results/results-single-query.html
[Markup-only hunks: banner, navigation, and the closing lines of "SMT-COMP 2020 Results - Single Query Track (Summary)". Final hunk removes one trailing line.]
diff --git a/archive/2020/results/results-unsat-core.html b/archive/2020/results/results-unsat-core.html
index 17ede844..bd43777b 100644
--- a/archive/2020/results/results-unsat-core.html
+++ b/archive/2020/results/results-unsat-core.html
[Markup-only hunks: banner, navigation, and the closing lines of "SMT-COMP 2020 Results - Unsat Core Track (Summary)". Final hunk removes one trailing line.]
diff --git a/archive/2020/results/uf-incremental.html b/archive/2020/results/uf-incremental.html
index f7a44a93..5f059687 100644
--- a/archive/2020/results/uf-incremental.html
+++ b/archive/2020/results/uf-incremental.html
[Markup-only hunks: banner, navigation, and the UF (Incremental Track) result table. Parallel Performance winner: CVC4-inc; fused score rows for z3n, CVC4-inc, SMTInterpol, SMTInterpol-fixedn, and UltimateEliminator+MathSAT. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/uf-single-query.html b/archive/2020/results/uf-single-query.html
index 1a1aa940..7742f302 100644
--- a/archive/2020/results/uf-single-query.html
+++ b/archive/2020/results/uf-single-query.html
[Markup-only hunks: banner, navigation, and the UF (Single Query Track) result tables. Winners as listed: Vampire (sequential, parallel, SAT, 24s), CVC4 (UNSAT); fused score rows for Vampire, CVC4, 2019-CVC4n, 2018-Vampiren, veriT, veriT+viten, Alt-Ergo, z3n, SMTInterpol-fixedn, UltimateEliminator+MathSAT, and SMTInterpol (one reported error). Final hunk removes one trailing line.]
diff --git a/archive/2020/results/uf-unsat-core.html b/archive/2020/results/uf-unsat-core.html
index bca349e2..2c60dad9 100644
--- a/archive/2020/results/uf-unsat-core.html
+++ b/archive/2020/results/uf-unsat-core.html
[Markup-only hunks: banner, navigation, and the UF (Unsat Core Track) result tables. Winner (sequential and parallel): CVC4-uc; fused score rows for CVC4-uc, z3n, SMTInterpol-fixedn, SMTInterpol, and UltimateEliminator+MathSAT. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/ufbv-single-query.html b/archive/2020/results/ufbv-single-query.html
index 1b163e03..c6694d29 100644
--- a/archive/2020/results/ufbv-single-query.html
+++ b/archive/2020/results/ufbv-single-query.html
[Markup-only hunks: banner, navigation, and the UFBV (Single Query Track) result tables. Winners as listed: CVC4 (sequential, parallel, UNSAT, 24s), none (SAT); fused score rows for 2018-Z3n, 2019-Par4n, z3n, CVC4, and UltimateEliminator+MathSAT. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/ufdt-single-query.html b/archive/2020/results/ufdt-single-query.html
index c2c5695a..e8738611 100644
--- a/archive/2020/results/ufdt-single-query.html
+++ b/archive/2020/results/ufdt-single-query.html
[Markup-only hunks: banner, navigation, and the UFDT (Single Query Track) result tables. Winners as listed: CVC4 (sequential, parallel, SAT, UNSAT), Alt-Ergo (24s); fused score rows for CVC4, 2019-CVC4n, Vampire, Alt-Ergo, and UltimateEliminator+MathSAT. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/ufdt-unsat-core.html b/archive/2020/results/ufdt-unsat-core.html
index 6a1bb9f8..d33f49df 100644
--- a/archive/2020/results/ufdt-unsat-core.html
+++ b/archive/2020/results/ufdt-unsat-core.html
[Markup-only hunks: banner, navigation, and the UFDT (Unsat Core Track) result tables. Winner (sequential and parallel): CVC4-uc; fused score rows for CVC4-uc and UltimateEliminator+MathSAT. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/ufdtlia-single-query.html b/archive/2020/results/ufdtlia-single-query.html
index ce280f04..63124202 100644
--- a/archive/2020/results/ufdtlia-single-query.html
+++ b/archive/2020/results/ufdtlia-single-query.html
[Markup-only hunks: banner, navigation, and the UFDTLIA (Single Query Track) result tables. Winners as listed: CVC4 (sequential, parallel, UNSAT), Vampire (24s), none (SAT); fused score rows for 2019-CVC4n, CVC4, Vampire, Alt-Ergo, and UltimateEliminator+MathSAT. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/ufdtlira-single-query.html b/archive/2020/results/ufdtlira-single-query.html
index 9d9d0974..e935c55c 100644
--- a/archive/2020/results/ufdtlira-single-query.html
+++ b/archive/2020/results/ufdtlira-single-query.html
[Markup-only hunks: banner, navigation, and the UFDTLIRA (Single Query Track) result tables. Winners as listed: CVC4 (sequential, parallel, SAT, 24s), Vampire (UNSAT); fused score rows for CVC4, Alt-Ergo, UltimateEliminator+MathSAT, and Vampire (29 reported errors). Final hunk removes one trailing line.]
diff --git a/archive/2020/results/ufdtlira-unsat-core.html b/archive/2020/results/ufdtlira-unsat-core.html
index 957f1f19..786181e8 100644
--- a/archive/2020/results/ufdtlira-unsat-core.html
+++ b/archive/2020/results/ufdtlira-unsat-core.html
[Markup-only hunks: banner, navigation, and the UFDTLIRA (Unsat Core Track) result tables. Winner (sequential and parallel): CVC4-uc; fused score rows for CVC4-uc and UltimateEliminator+MathSAT. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/ufdtnia-single-query.html b/archive/2020/results/ufdtnia-single-query.html
index 3874861e..b19f767a 100644
--- a/archive/2020/results/ufdtnia-single-query.html
+++ b/archive/2020/results/ufdtnia-single-query.html
[Markup-only hunks: banner, navigation, and the UFDTNIA (Single Query Track) result tables. Winners as listed: Vampire (sequential, parallel, UNSAT, 24s), none (SAT); fused score rows for Vampire, 2019-Vampiren, CVC4, UltimateEliminator+MathSAT, and Alt-Ergo. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/ufdtnira-single-query.html b/archive/2020/results/ufdtnira-single-query.html
index 6b1fdb6f..70740303 100644
--- a/archive/2020/results/ufdtnira-single-query.html
+++ b/archive/2020/results/ufdtnira-single-query.html
[Markup-only hunks: banner, navigation, and the UFDTNIRA (Single Query Track) result tables. Winners as listed: CVC4 (sequential, 24s), Vampire (parallel, UNSAT), none (SAT); fused score rows for CVC4, Alt-Ergo, Vampire, and UltimateEliminator+MathSAT. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/ufdtnira-unsat-core.html b/archive/2020/results/ufdtnira-unsat-core.html
index f1b4d8a1..2c6068eb 100644
--- a/archive/2020/results/ufdtnira-unsat-core.html
+++ b/archive/2020/results/ufdtnira-unsat-core.html
[Markup-only hunks: banner, navigation, and the UFDTNIRA (Unsat Core Track) result tables. Winner (sequential and parallel): CVC4-uc; fused score rows for CVC4-uc and UltimateEliminator+MathSAT. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/uffpdtlira-single-query.html b/archive/2020/results/uffpdtlira-single-query.html
index dc333801..abb44d92 100644
--- a/archive/2020/results/uffpdtlira-single-query.html
+++ b/archive/2020/results/uffpdtlira-single-query.html
[Markup-only hunks: banner, navigation, and the UFFPDTLIRA (Single Query Track) result tables. Winner in all five categories: CVC4; fused score rows for CVC4 and UltimateEliminator+MathSAT. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/uffpdtlira-unsat-core.html b/archive/2020/results/uffpdtlira-unsat-core.html
index 5df346b8..55bb375c 100644
--- a/archive/2020/results/uffpdtlira-unsat-core.html
+++ b/archive/2020/results/uffpdtlira-unsat-core.html
[Markup-only hunks: banner, navigation, and the UFFPDTLIRA (Unsat Core Track) result tables. Winner (sequential and parallel): CVC4-uc; fused score rows for CVC4-uc and UltimateEliminator+MathSAT. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/uffpdtnira-single-query.html b/archive/2020/results/uffpdtnira-single-query.html
index 133641bf..8c5d382f 100644
--- a/archive/2020/results/uffpdtnira-single-query.html
+++ b/archive/2020/results/uffpdtnira-single-query.html
[Markup-only hunks: banner, navigation, and the UFFPDTNIRA (Single Query Track) result tables. Winners as listed: CVC4 (sequential, parallel, UNSAT, 24s), none (SAT); fused score rows for CVC4 and UltimateEliminator+MathSAT. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/uffpdtnira-unsat-core.html b/archive/2020/results/uffpdtnira-unsat-core.html
index 67be5cb0..46e01d3c 100644
--- a/archive/2020/results/uffpdtnira-unsat-core.html
+++ b/archive/2020/results/uffpdtnira-unsat-core.html
[Markup-only hunks: banner, navigation, and the UFFPDTNIRA (Unsat Core Track) result tables. Winner (sequential and parallel): CVC4-uc; fused score rows for CVC4-uc and UltimateEliminator+MathSAT. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/ufidl-single-query.html b/archive/2020/results/ufidl-single-query.html
index 700028f9..b5227295 100644
--- a/archive/2020/results/ufidl-single-query.html
+++ b/archive/2020/results/ufidl-single-query.html
[Markup-only hunks: banner, navigation, and the UFIDL (Single Query Track) result tables. Winner in all five categories: CVC4; fused score rows for 2019-Par4n, z3n, CVC4, SMTInterpol, SMTInterpol-fixedn, veriT+viten, veriT, Vampire, and UltimateEliminator+MathSAT. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/ufidl-unsat-core.html b/archive/2020/results/ufidl-unsat-core.html
index ed2f37ff..74125d89 100644
--- a/archive/2020/results/ufidl-unsat-core.html
+++ b/archive/2020/results/ufidl-unsat-core.html
[Markup-only hunks: banner, navigation, and the UFIDL (Unsat Core Track) result tables. Winner (sequential and parallel): CVC4-uc; fused score rows for z3n, CVC4-uc, SMTInterpol, SMTInterpol-fixedn, and UltimateEliminator+MathSAT. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/uflia-single-query.html b/archive/2020/results/uflia-single-query.html
index a215af14..b81a6483 100644
--- a/archive/2020/results/uflia-single-query.html
+++ b/archive/2020/results/uflia-single-query.html
[Markup-only hunks: banner, navigation, and the UFLIA (Single Query Track) result tables. Winners as listed: CVC4 (sequential, parallel, UNSAT, 24s), SMTInterpol (SAT); fused score rows for 2019-Par4n, CVC4, veriT, Vampire, veriT+viten, Alt-Ergo, z3n, SMTInterpol, SMTInterpol-fixedn, and UltimateEliminator+MathSAT. Final hunk removes one trailing line.]
diff --git a/archive/2020/results/uflia-unsat-core.html b/archive/2020/results/uflia-unsat-core.html
index 6fabff25..f866b04a 100644
[Header/footer markup fix; no visible text change in the hunk context. Page text: UFLIA (Unsat Core Track). Winner: CVC4-uc (sequential and parallel). Ranking: CVC4-uc 734528, z3n 683837, SMTInterpol-fixedn 453685, SMTInterpol 453685, UltimateEliminator+MathSAT 0.]
diff --git a/archive/2020/results/uflra-incremental.html b/archive/2020/results/uflra-incremental.html
index 017cad3b..661141fe 100644
[Header/footer markup fix; no visible text change in the hunk context. Page text: UFLRA (Incremental Track). Winner: CVC4-inc (parallel). Correct answers: 2019-Z3n 320367, z3n 318483, CVC4-inc 114150, SMTInterpol-fixedn 107995, SMTInterpol 107994, UltimateEliminator+MathSAT 0.]
diff --git a/archive/2020/results/uflra-single-query.html b/archive/2020/results/uflra-single-query.html
index 15e98754..1ac178aa 100644
[Header/footer markup fix; no visible text change in the hunk context. Page text: UFLRA (Single Query Track). Winners: Alt-Ergo (sequential), Alt-Ergo (parallel), none (SAT), veriT (UNSAT), Alt-Ergo (24s). Sequential ranking by correct results: z3n 5, 2018-Z3n 5, veriT+viten 2, Alt-Ergo 2, SMTInterpol-fixedn 2, SMTInterpol 2, CVC4 2, veriT 2, Vampire 2, UltimateEliminator+MathSAT 0.]
diff --git a/archive/2020/results/uflra-unsat-core.html b/archive/2020/results/uflra-unsat-core.html
index 77347a15..7a8d8eaa 100644
[Header/footer markup fix; no visible text change in the hunk context. Page text: UFLRA (Unsat Core Track). Winner: SMTInterpol (sequential and parallel). Ranking: z3n 16, SMTInterpol-fixedn 16, SMTInterpol 16, CVC4-uc 14, UltimateEliminator+MathSAT 0.]
diff --git a/archive/2020/results/ufnia-incremental.html b/archive/2020/results/ufnia-incremental.html
index eb8d69b3..f00d01c0 100644
[Header/footer markup fix; no visible text change in the hunk context. Page text: UFNIA (Incremental Track). Winner: CVC4-inc (parallel). Correct answers: z3n 875101, CVC4-inc 344767, UltimateEliminator+MathSAT 0.]
diff --git a/archive/2020/results/ufnia-single-query.html b/archive/2020/results/ufnia-single-query.html
index 2e7b0979..20a4b3d4 100644
[Header/footer markup fix; no visible text change in the hunk context. Page text: UFNIA (Single Query Track). Winner: CVC4 in all five categories (sequential, parallel, SAT, UNSAT, 24s). Sequential ranking by correct results: 2019-Par4n 3027, CVC4 2789, z3n 2205, Vampire 1742, 2018-Vampiren 1584, Alt-Ergo 790, UltimateEliminator+MathSAT 651 (2018-Vampiren has error score 1 in the parallel table).]
diff --git a/archive/2020/results/ufnia-unsat-core.html b/archive/2020/results/ufnia-unsat-core.html
index 56f9a209..43e639ad 100644
[Header/footer markup fix; no visible text change in the hunk context. Page text: UFNIA (Unsat Core Track). Winner: CVC4-uc (sequential and parallel). Ranking: CVC4-uc 78596, z3n 59521, UltimateEliminator+MathSAT 0.]
diff --git a/archive/2020/results/ufnra-incremental.html b/archive/2020/results/ufnra-incremental.html
index 9cfaf9a7..7322c00f 100644
[Header/footer markup fix; no visible text change in the hunk context. Page text: UFNRA (Incremental Track). Winner: CVC4-inc (parallel). Correct answers: z3n 215, CVC4-inc 11, UltimateEliminator+MathSAT 0.]
diff --git a/archive/2020/specs.html b/archive/2020/specs.html
index e5f6c2fb..d75f2f4b 100644
[Header/footer markup fix; no visible text change in the hunk context. Page text: Machine Specifications.]
diff --git a/archive/2020/tools.html b/archive/2020/tools.html
index 70c1068e..4d6989f3 100644
[Markup-level hunks around the link lists; no visible text change. Page text, Tools (a toy sketch of what the benchmark scrambler does is given after this list):

Pre-Processor (Benchmark Scrambler): GitHub Repository, Sources, SMT-COMP 2020 Releases
  • Single Query Track: Binary; available on StarExec as SMT-COMP 2020 Single-Query Scrambler (id: 611)
  • Incremental Track: Binary; available on StarExec as SMT-COMP 2020 Incremental Scrambler (id: 609)
  • Unsat Core Track: Binary; available on StarExec as SMT-COMP 2020 Unsat-Core Scrambler (id: 607)
  • Model Validation Track: Binary; available on StarExec as SMT-COMP 2020 Model-Validation Scrambler (id: 610)

Post-Processor: GitHub Repository, Sources, SMT-COMP 2020 Releases
  • Single Query Track: Binary; available on StarExec as SMT-COMP 2020 Single Query 2020 05 27 (id: 632)
  • Incremental Track: Binary; available on StarExec as SMT-COMP 2020 Incremental (id: 615)
  • Unsat Core Track: Binary; available on StarExec as SMT-COMP 2020 Unsat-Core 2020 06 12 (id: 647)
  • Model Validation Track: Binary; available on StarExec as SMT-COMP 2020 Model-Validation (id: 631)

Trace executor: GitHub Repository, Sources, Binary. All solvers wrapped with the Trace executor are available here.

Competition scripts: GitHub Repository, Sources, Tag.]
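The page above links the scrambler binaries but does not describe their operation. As a rough illustration only, here is a minimal Python sketch of the general idea, permuting the top-level assertions of a benchmark so that solvers cannot key on a known input; it assumes one SMT-LIB command per line, and the real tool does more than this (this is not its actual code or interface):

```python
import random

def scramble(smt2_text: str, seed: int) -> str:
    """Toy illustration: permute top-level (assert ...) lines among
    their positions, leaving all other commands where they are.
    Assumes one SMT-LIB command per line; not the real scrambler."""
    rng = random.Random(seed)
    lines = smt2_text.splitlines()
    # Positions of assertion commands.
    idx = [i for i, line in enumerate(lines)
           if line.lstrip().startswith("(assert")]
    perm = idx[:]
    rng.shuffle(perm)
    out = lines[:]
    for dst, src in zip(idx, perm):
        out[dst] = lines[src]
    return "\n".join(out) + "\n"

if __name__ == "__main__":
    example = (
        "(set-logic QF_UF)\n"
        "(declare-const p Bool)\n"
        "(declare-const q Bool)\n"
        "(assert (or p q))\n"
        "(assert (not p))\n"
        "(check-sat)\n"
    )
    print(scramble(example, seed=2020), end="")
```

Because the permutation is seeded, the same seed reproduces the same scrambled benchmark, which is what makes a scrambled run repeatable across solvers.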
diff --git a/archive/2021/benchmarks.html b/archive/2021/benchmarks.html
index a79f8b7a..f299915c 100644
[Header/footer markup fix; no visible text change in the hunk context. Page text: Benchmarks.]
[The nineteen division pages under archive/2021/divisions/ receive the same header/footer markup fix; on each, the only visible text in the hunk context is the shared site header and a Tracks heading:]
diff --git a/archive/2021/divisions/arith.html b/archive/2021/divisions/arith.html
index d8ccf57a..61c495c4 100644
diff --git a/archive/2021/divisions/bitvec.html b/archive/2021/divisions/bitvec.html
index 425bc92f..a99b20fa 100644
diff --git a/archive/2021/divisions/equality-lineararith.html b/archive/2021/divisions/equality-lineararith.html
index 9a11a7f0..d5e935ed 100644
diff --git a/archive/2021/divisions/equality-machinearith.html b/archive/2021/divisions/equality-machinearith.html
index 0d0e1e18..81365e60 100644
diff --git a/archive/2021/divisions/equality-nonlineararith.html b/archive/2021/divisions/equality-nonlineararith.html
index d77335d0..9bfbe0d7 100644
diff --git a/archive/2021/divisions/equality.html b/archive/2021/divisions/equality.html
index ef2a7b7e..b335c9a7 100644
diff --git a/archive/2021/divisions/fparith.html b/archive/2021/divisions/fparith.html
index 2b90128d..97605035 100644
diff --git a/archive/2021/divisions/qf-bitvec.html b/archive/2021/divisions/qf-bitvec.html
index 26df0268..f23481d3 100644
diff --git a/archive/2021/divisions/qf-equality-bitvec-arith.html b/archive/2021/divisions/qf-equality-bitvec-arith.html
index a55cc51e..033d2e21 100644
diff --git a/archive/2021/divisions/qf-equality-bitvec.html b/archive/2021/divisions/qf-equality-bitvec.html
index 7d9475d4..91579d8a 100644
diff --git a/archive/2021/divisions/qf-equality-lineararith.html b/archive/2021/divisions/qf-equality-lineararith.html
index d0e07f24..c75f93e0 100644
diff --git a/archive/2021/divisions/qf-equality-nonlineararith.html b/archive/2021/divisions/qf-equality-nonlineararith.html
index 0b492885..2101f27d 100644
diff --git a/archive/2021/divisions/qf-equality.html b/archive/2021/divisions/qf-equality.html
index cefa87c9..cc7ba63d 100644
diff --git a/archive/2021/divisions/qf-fparith.html b/archive/2021/divisions/qf-fparith.html
index e8884948..5e7fb21f 100644
diff --git a/archive/2021/divisions/qf-linearintarith.html b/archive/2021/divisions/qf-linearintarith.html
index 2687951b..53bd6382 100644
diff --git a/archive/2021/divisions/qf-linearrealarith.html b/archive/2021/divisions/qf-linearrealarith.html
index d2d2959d..1412da97 100644
diff --git a/archive/2021/divisions/qf-nonlinearintarith.html b/archive/2021/divisions/qf-nonlinearintarith.html
index abf9ffd3..90d2acec 100644
diff --git a/archive/2021/divisions/qf-nonlinearrealarith.html b/archive/2021/divisions/qf-nonlinearrealarith.html
index 89f6e2e1..2359ed65 100644
diff --git a/archive/2021/divisions/qf-strings.html b/archive/2021/divisions/qf-strings.html
index 509f92b1..ca4bc523 100644
diff --git a/archive/2021/index.html b/archive/2021/index.html
index fed6190d..5c5e1d80 100644
[Header/footer markup fix; no visible text change in the hunk context. Page text: Acknowledgment.]
[The nine news pages under archive/2021/news/ receive the same header/footer fix; the surviving heading (and, for the June announcement, its body text) is noted per file:]
diff --git a/archive/2021/news/2020-12-17-pre-announcement.html b/archive/2021/news/2020-12-17-pre-announcement.html
index 4a3d593e..79801578 100644  ("SMT-COMP 2021 pre-announcement")
diff --git a/archive/2021/news/2021-02-05-call-for-benchmark.html b/archive/2021/news/2021-02-05-call-for-benchmark.html
index b05c8ad7..250c3959 100644  ("Call for Benchmarks")
diff --git a/archive/2021/news/2021-02-05-parallel-and-cloud-tracks.html b/archive/2021/news/2021-02-05-parallel-and-cloud-tracks.html
index ccbfc6e4..f014ee05 100644  ("Solver Submission to Par…")
diff --git a/archive/2021/news/2021-02-05-precall-for-solvers.html b/archive/2021/news/2021-02-05-precall-for-solvers.html
index bc0208f4..83180eab 100644  ("Option print-success is Always Set…")
diff --git a/archive/2021/news/2021-05-07-call-for-solvers.html b/archive/2021/news/2021-05-07-call-for-solvers.html
index 8dd53d56..a371cd83 100644  ("Final Call for Solvers")
diff --git a/archive/2021/news/2021-06-01-competing-solvers.html b/archive/2021/news/2021-06-01-competing-solvers.html
index d6a2a117..a7a9f100 100644  ("SMT-COMP Competing Solvers", 01 Jun 2021: "The list of solvers registered to compete in SMT-COMP 2021 is online. If you submitted a solver, please take the time to check that the information on the website is accurate.")
diff --git a/archive/2021/news/2021-06-18-jobs-running.html b/archive/2021/news/2021-06-18-jobs-running.html
index efb2b635..3509c60f 100644  ("SMT-COMP 2021 is Live")
diff --git a/archive/2021/news/2021-06-30-excluded-benchmarks.html b/archive/2021/news/2021-06-30-excluded-benchmarks.html
index 1bdcd75b..af1a5043 100644  ("Wrongly Classified Benchmarks")
diff --git a/archive/2021/news/2021-07-18-competition-results.html b/archive/2021/news/2021-07-18-competition-results.html
index 61b3b022..81dd218b 100644  ("Competition results")
diff --git a/archive/2021/parallel-and-cloud-tracks.html b/archive/2021/parallel-and-cloud-tracks.html
index fcde2d06..c96ef9ad 100644
[Header/footer markup fix; no visible text change in the hunk context. Page text: "Solver Submission to Par…".]
diff --git a/archive/2021/parallel-and-cloud-tracks.md b/archive/2021/parallel-and-cloud-tracks.md
index 0305206e..1b5cc5b2 100644
--- a/archive/2021/parallel-and-cloud-tracks.md
+++ b/archive/2021/parallel-and-cloud-tracks.md
@@ -66,4 +66,3 @@ In order to participate in the Cloud or Parallel Track please send an email to
 2. your AWS account number
 3. the URL of the GitHub repository including the branch
 4. the full, 40-character SHA-1 hash of the commit
-
[Item 1 and the destination address lie outside the hunk context; a sketch of obtaining the item-4 hash follows below.]
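Item 4 of the list above asks for the full 40-character commit hash. A minimal sketch of retrieving it, assuming git is on PATH and the working directory is the clone being submitted (this is just the standard git query; the submission process itself provides no helper for it):

```python
import subprocess

# Print the full 40-character SHA-1 of the checked-out commit, as asked
# for in item 4 of the submission email (assumes a git checkout).
sha = subprocess.run(
    ["git", "rev-parse", "HEAD"],
    capture_output=True, text=True, check=True,
).stdout.strip()
assert len(sha) == 40, sha
print(sha)
```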

diff --git a/archive/2021/participants.html b/archive/2021/participants.html
index 81a71780..eb201659 100644
[Header/footer markup fix, with hunks at the page header and near line 4421; no visible text change. Page text: the participants list, ending with a QF_Strings heading.]
[The participant pages under archive/2021/participants/ receive the same header/footer markup fix; on each, the visible text in the hunk context is the shared site header and the solver's name:]
diff --git a/archive/2021/participants/2018-cvc4.html b/archive/2021/participants/2018-cvc4.html
index 36f103c8..ec9db02a 100644
diff --git a/archive/2021/participants/2018-mathsat-incremental.html b/archive/2021/participants/2018-mathsat-incremental.html
index 0d8d0ab7..5da6db09 100644
diff --git a/archive/2021/participants/2018-yices-incremental.html b/archive/2021/participants/2018-yices-incremental.html
index 4c4860aa..e712c483 100644
diff --git a/archive/2021/participants/2018-yices.html b/archive/2021/participants/2018-yices.html
index 3d794cd3..3ce548d2 100644
diff --git a/archive/2021/participants/2018-z3-incremental.html b/archive/2021/participants/2018-z3-incremental.html
index afb0db9d..b8ae45ba 100644
diff --git a/archive/2021/participants/2018-z3.html b/archive/2021/participants/2018-z3.html
index 33940f6b..276be52b 100644
diff --git a/archive/2021/participants/2019-cvc4-inc.html b/archive/2021/participants/2019-cvc4-inc.html
index 8c21b21c..eefd844a 100644
diff --git a/archive/2021/participants/2019-cvc4.html b/archive/2021/participants/2019-cvc4.html
index 7569b451..cde1d13d 100644
diff --git a/archive/2021/participants/2019-mathsat-default.html b/archive/2021/participants/2019-mathsat-default.html
index 3913ab16..d7ccb0eb 100644
diff --git a/archive/2021/participants/2019-par4.html b/archive/2021/participants/2019-par4.html
index e8dc98d4..8814f997 100644
diff --git a/archive/2021/participants/2019-smtinterpol.html b/archive/2021/participants/2019-smtinterpol.html
index 29a00476..f9c3d32f 100644
diff --git a/archive/2021/participants/2019-yices-2-6-2-incremental.html b/archive/2021/participants/2019-yices-2-6-2-incremental.html
index 42da2ed3..435abeea 100644
diff --git a/archive/2021/participants/2019-yices-2-6-2.html b/archive/2021/participants/2019-yices-2-6-2.html
index 46616aa6..bb9a782b 100644
diff --git a/archive/2021/participants/2019-z3.html b/archive/2021/participants/2019-z3.html
index 58636b1e..4acff459 100644
diff --git a/archive/2021/participants/2020-bitwuzla-fixed.html b/archive/2021/participants/2020-bitwuzla-fixed.html
index 34f73e99..08559e0f 100644
diff --git a/archive/2021/participants/2020-bitwuzla.html b/archive/2021/participants/2020-bitwuzla.html
index 50e70c94..11e5bfd5 100644
diff --git a/archive/2021/participants/2020-colibri.html b/archive/2021/participants/2020-colibri.html
index 901d1d20..7902848e 100644
diff --git a/archive/2021/participants/2020-cvc4-inc.html b/archive/2021/participants/2020-cvc4-inc.html
index e7b53144..3858e1c3 100644
diff --git a/archive/2021/participants/2020-cvc4-uc.html b/archive/2021/participants/2020-cvc4-uc.html
index e3eab5e3..266fa6f1 100644
diff --git a/archive/2021/participants/2020-cvc4.html b/archive/2021/participants/2020-cvc4.html
index ae9b49f0..327ba988 100644
diff --git a/archive/2021/participants/2020-mathsat5.html b/archive/2021/participants/2020-mathsat5.html
index ac8cc052..ba7408b4 100644
diff --git a/archive/2021/participants/2020-opensmt.html b/archive/2021/participants/2020-opensmt.html
index a4f6a36e..51ac33c0 100644
diff --git a/archive/2021/participants/2020-smt-rat.html b/archive/2021/participants/2020-smt-rat.html
index 7f637d84..164017f3 100644
diff --git a/archive/2021/participants/2020-smtinterpol-fixed.html b/archive/2021/participants/2020-smtinterpol-fixed.html
index cbd8fd61..e41c7e60 100644
diff --git a/archive/2021/participants/2020-vampire.html b/archive/2021/participants/2020-vampire.html
index 33c317c1..de587364 100644
diff --git a/archive/2021/participants/2020-yices2-fixed-incremental.html b/archive/2021/participants/2020-yices2-fixed-incremental.html
index 804dc38d..97b550f8 100644
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    2020-Yices2-fixed incremental

    - + - - diff --git a/archive/2021/participants/2020-yices2-fixed-model-validation.html b/archive/2021/participants/2020-yices2-fixed-model-validation.html index b110b32c..1c7eaea7 100644 --- a/archive/2021/participants/2020-yices2-fixed-model-validation.html +++ b/archive/2021/participants/2020-yices2-fixed-model-validation.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2021 Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    2020-Yices2-fixed Model Validation

    - + - - diff --git a/archive/2021/participants/2020-yices2-fixed.html b/archive/2021/participants/2020-yices2-fixed.html index 8e817a7d..b39c455c 100644 --- a/archive/2021/participants/2020-yices2-fixed.html +++ b/archive/2021/participants/2020-yices2-fixed.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2021 Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    2020-Yices2-fixed

    - + - - diff --git a/archive/2021/participants/2020-yices2-incremental.html b/archive/2021/participants/2020-yices2-incremental.html index 8c721bff..356ecade 100644 --- a/archive/2021/participants/2020-yices2-incremental.html +++ b/archive/2021/participants/2020-yices2-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2021 Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    2020-Yices2 incremental

    - + - - diff --git a/archive/2021/participants/2020-yices2-model-validation.html b/archive/2021/participants/2020-yices2-model-validation.html index 1f25bb01..6f839088 100644 --- a/archive/2021/participants/2020-yices2-model-validation.html +++ b/archive/2021/participants/2020-yices2-model-validation.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2021 Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    2020-Yices2 Model Validation

    - + - - diff --git a/archive/2021/participants/2020-yices2.html b/archive/2021/participants/2020-yices2.html index 2c4292b7..5149b7bd 100644 --- a/archive/2021/participants/2020-yices2.html +++ b/archive/2021/participants/2020-yices2.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2021 Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    2020-Yices2

    - + - - diff --git a/archive/2021/participants/2020-z3.html b/archive/2021/participants/2020-z3.html index 0c9a6c42..ecf82a12 100644 --- a/archive/2021/participants/2020-z3.html +++ b/archive/2021/participants/2020-z3.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2021 Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -140,8 +140,6 @@

    2020-z3

diff --git a/archive/2021/participants/aprove.html b/archive/2021/participants/aprove.html
index 0d32578b..013ba2a1 100644
diff --git a/archive/2021/participants/bitwuzla-fixed.html b/archive/2021/participants/bitwuzla-fixed.html
index b2e7edab..2c06faf3 100644
diff --git a/archive/2021/participants/bitwuzla.html b/archive/2021/participants/bitwuzla.html
index 742b3c33..0dc68fc1 100644
diff --git a/archive/2021/participants/colibri-fixed.html b/archive/2021/participants/colibri-fixed.html
index 1daf03b7..93881347 100644
diff --git a/archive/2021/participants/colibri.html b/archive/2021/participants/colibri.html
index d84eea6f..4599674d 100644
diff --git a/archive/2021/participants/cvc5-fixed.html b/archive/2021/participants/cvc5-fixed.html
index b11906df..ad68df69 100644
diff --git a/archive/2021/participants/cvc5-gg.html b/archive/2021/participants/cvc5-gg.html
index 2b53786f..d761013e 100644
diff --git a/archive/2021/participants/cvc5-inc-fixed.html b/archive/2021/participants/cvc5-inc-fixed.html
index 0ed021e7..a1caac12 100644
diff --git a/archive/2021/participants/cvc5-inc.html b/archive/2021/participants/cvc5-inc.html
index 4efeede3..6ba51a65 100644
diff --git a/archive/2021/participants/cvc5-mv-fixed.html b/archive/2021/participants/cvc5-mv-fixed.html
index de43124b..c0de1eb2 100644
diff --git a/archive/2021/participants/cvc5-mv.html b/archive/2021/participants/cvc5-mv.html
index c2fb67c2..c27632c3 100644
diff --git a/archive/2021/participants/cvc5-uc-fixed.html b/archive/2021/participants/cvc5-uc-fixed.html
index d97812a8..0609d5c3 100644
diff --git a/archive/2021/participants/cvc5-uc.html b/archive/2021/participants/cvc5-uc.html
index 31a8be51..90e5bac6 100644
diff --git a/archive/2021/participants/cvc5.html b/archive/2021/participants/cvc5.html
index dc23e317..f9228cbb 100644
diff --git a/archive/2021/participants/iprover-fixed.html b/archive/2021/participants/iprover-fixed.html
index a316e4c1..f0ec71e2 100644
diff --git a/archive/2021/participants/iprover-fixed2.html b/archive/2021/participants/iprover-fixed2.html
index 9c874dd2..3192d7ab 100644
diff --git a/archive/2021/participants/iprover.html b/archive/2021/participants/iprover.html
index 4e6dd71b..7f98039e 100644
diff --git a/archive/2021/participants/mathsat5.html b/archive/2021/participants/mathsat5.html
index 4c574a50..0d4e2e35 100644
diff --git a/archive/2021/participants/mc2.html b/archive/2021/participants/mc2.html
index b661d00a..1891e204 100644
diff --git a/archive/2021/participants/opensmt-fixed.html b/archive/2021/participants/opensmt-fixed.html
index 714f42e3..c5ba4384 100644
diff --git a/archive/2021/participants/opensmt.html b/archive/2021/participants/opensmt.html
index 574ca60b..31c7b35d 100644
diff --git a/archive/2021/participants/par4.html b/archive/2021/participants/par4.html
index 30be78f8..3fa02e8a 100644
diff --git a/archive/2021/participants/smt-rat-mcsat.html b/archive/2021/participants/smt-rat-mcsat.html
index 5409bbe3..54418c29 100644
diff --git a/archive/2021/participants/smt-rat.html b/archive/2021/participants/smt-rat.html
index c6c537bb..f92b74e8 100644
diff --git a/archive/2021/participants/smtinterpol-remus.html b/archive/2021/participants/smtinterpol-remus.html
index f09d45ed..b3f4c5da 100644
diff --git a/archive/2021/participants/smtinterpol.html b/archive/2021/participants/smtinterpol.html
index 50ddaeb4..3a9db97c 100644
diff --git a/archive/2021/participants/smts-cube-and-conquer.html b/archive/2021/participants/smts-cube-and-conquer.html
index 9028a480..55462cc2 100644
diff --git a/archive/2021/participants/smts-portfolio.html b/archive/2021/participants/smts-portfolio.html
index dbc53c3b..342d787f 100644
diff --git a/archive/2021/participants/stp-cms-cloud.html b/archive/2021/participants/stp-cms-cloud.html
index bc78c033..77ecf8bc 100644
diff --git a/archive/2021/participants/stp-parallel.html b/archive/2021/participants/stp-parallel.html
index ae3535da..14fd8a74 100644
diff --git a/archive/2021/participants/stp.html b/archive/2021/participants/stp.html
index dbf09a0c..0e7cbd4e 100644
diff --git a/archive/2021/participants/ultimateeliminator-mathsat.html b/archive/2021/participants/ultimateeliminator-mathsat.html
index 70bf9a60..6e93ddb0 100644
diff --git a/archive/2021/participants/vampire-fixed.html b/archive/2021/participants/vampire-fixed.html
index ebabe049..86216053 100644
diff --git a/archive/2021/participants/vampire.html b/archive/2021/participants/vampire.html
index 3a542fcf..886a4000 100644
diff --git a/archive/2021/participants/verit-rasat-redlog.html b/archive/2021/participants/verit-rasat-redlog.html
index 94004c5d..b32970d7 100644
diff --git a/archive/2021/participants/verit.html b/archive/2021/participants/verit.html
index 1953a6ef..028836a7 100644
diff --git a/archive/2021/participants/yices2-incremental.html b/archive/2021/participants/yices2-incremental.html
index 96b4c05c..8ac3bf24 100644
diff --git a/archive/2021/participants/yices2-model-validation.html b/archive/2021/participants/yices2-model-validation.html
index 2dd5bd3d..77f2728e 100644
diff --git a/archive/2021/participants/yices2-qs.html b/archive/2021/participants/yices2-qs.html
index 690d0d83..06247802 100644
diff --git a/archive/2021/participants/yices2.html b/archive/2021/participants/yices2.html
index a2ffb23b..da340806 100644
diff --git a/archive/2021/participants/yicesls.html b/archive/2021/participants/yicesls.html
index 42ff7f43..205239cb 100644
diff --git a/archive/2021/participants/z3-mv.html b/archive/2021/participants/z3-mv.html
index eabd1dd0..1a18a35b 100644
diff --git a/archive/2021/participants/z3.html b/archive/2021/participants/z3.html
index 506c5ce2..0e2a8cd3 100644
diff --git a/archive/2021/participants/z3str4-fixed.html b/archive/2021/participants/z3str4-fixed.html
index 2e08505b..899d34c2 100644
diff --git a/archive/2021/participants/z3str4.html b/archive/2021/participants/z3str4.html
index 673411b7..592b7d27 100644
diff --git a/archive/2021/results.html b/archive/2021/results.html
index a8984ad4..0c368162 100644
[hunks at @@ -34,7 +34,7 @@ and @@ -55,7 +55,7 @@ (page header and navigation), @@ -154,243 +154,243 @@ (the "Tracks Summary" / "Divisions" listing), and @@ -405,7 +405,6 @@ (one trailing line removed); changed lines lost in extraction]
[The per-logic results pages follow one pattern, with hunk bodies again lost in extraction: edits in the page header and navigation (@@ -35,7 +35,7 @@ and @@ -56,7 +56,7 @@), in the "Competition results for the <logic> logic/division in the <track> Track" intro (@@ -82,9 +82,9 @@), in the winner summary, one edited line per solver row of the score tables, and a final hunk removing one trailing line. Only the per-file headers are kept here.]

diff --git a/archive/2021/results/abv-single-query.html b/archive/2021/results/abv-single-query.html
index a27f8b18..98a03ace 100644
diff --git a/archive/2021/results/abv-unsat-core.html b/archive/2021/results/abv-unsat-core.html
index e15d460f..4bbf9312 100644
diff --git a/archive/2021/results/abvfp-single-query.html b/archive/2021/results/abvfp-single-query.html
index 6dc7af9b..3810ef4b 100644
diff --git a/archive/2021/results/abvfplra-single-query.html b/archive/2021/results/abvfplra-single-query.html
index 8746d2d0..6c125fbe 100644
diff --git a/archive/2021/results/alia-incremental.html b/archive/2021/results/alia-incremental.html
index 458d669d..f7681ae8 100644
diff --git a/archive/2021/results/alia-single-query.html b/archive/2021/results/alia-single-query.html
index bfd9288e..5ab45709 100644
diff --git a/archive/2021/results/ania-incremental.html b/archive/2021/results/ania-incremental.html
index 982894fb..d5dc2f2d 100644
diff --git a/archive/2021/results/arith-cloud.html b/archive/2021/results/arith-cloud.html
index c983bfeb..01722213 100644
diff --git a/archive/2021/results/arith-incremental.html b/archive/2021/results/arith-incremental.html
index f2ecb13b..2fc7a80a 100644
diff --git a/archive/2021/results/arith-parallel.html b/archive/2021/results/arith-parallel.html
index 81b4cd34..fa64c151 100644
diff --git a/archive/2021/results/arith-single-query.html b/archive/2021/results/arith-single-query.html
index ee5f7e0f..4ef7f5e7 100644
--- a/archive/2021/results/arith-single-query.html
+++ b/archive/2021/results/arith-single-query.html

Arith (Single Query Track)

Competition results for the Arith division in the Single Query Track.

[Winners: Sequential cvc5, Parallel cvc5, SAT cvc5, UNSAT Vampire, 24s cvc5. Ranked result tables for z3n, 2020-z3n, 2019-Par4n, cvc5 - fixedn, cvc5, 2020-CVC4n, UltimateEliminator+MathSAT, Vampire, Vampire - fixedn, Yices2-QS, iProver, SMTInterpol, veriT, and 2018-Z3n; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/arith-unsat-core.html b/archive/2021/results/arith-unsat-core.html
index d26b5f69..ecad788c 100644
--- a/archive/2021/results/arith-unsat-core.html
+++ b/archive/2021/results/arith-unsat-core.html

Arith (Unsat Core Track)

Competition results for the Arith division in the Unsat Core Track.

[Winners: Sequential cvc5-uc, Parallel cvc5-uc. Ranked result tables for 2020-CVC4-ucn, z3n, cvc5-uc, Vampire, SMTInterpol-remus, SMTInterpol, and UltimateEliminator+MathSAT; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/aufbv-single-query.html b/archive/2021/results/aufbv-single-query.html
index 9ec2c710..7290c8b9 100644
--- a/archive/2021/results/aufbv-single-query.html
+++ b/archive/2021/results/aufbv-single-query.html

AUFBV (Single Query Track)

Competition results for the AUFBV logic in the Single Query Track.

[Winners: cvc5 in all five categories (Sequential, Parallel, SAT, UNSAT, 24s). Ranked result tables for z3n, cvc5 - fixedn, cvc5, and UltimateEliminator+MathSAT; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/aufbvdtlia-single-query.html b/archive/2021/results/aufbvdtlia-single-query.html
index a7813bbe..974f50c6 100644
--- a/archive/2021/results/aufbvdtlia-single-query.html
+++ b/archive/2021/results/aufbvdtlia-single-query.html

AUFBVDTLIA (Single Query Track)

Competition results for the AUFBVDTLIA logic in the Single Query Track.

[Winners: cvc5 in all five categories. Ranked result tables for 2020-CVC4n, cvc5, and cvc5 - fixedn; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/aufbvdtlia-unsat-core.html b/archive/2021/results/aufbvdtlia-unsat-core.html
index 0de68146..d0715d3d 100644
--- a/archive/2021/results/aufbvdtlia-unsat-core.html
+++ b/archive/2021/results/aufbvdtlia-unsat-core.html

AUFBVDTLIA (Unsat Core Track)

Competition results for the AUFBVDTLIA logic in the Unsat Core Track.

[Winners: Sequential cvc5-uc, Parallel cvc5-uc. Ranked result tables for 2020-CVC4-ucn and cvc5-uc; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/aufbvdtnia-single-query.html b/archive/2021/results/aufbvdtnia-single-query.html
index e9843f54..656af60c 100644
--- a/archive/2021/results/aufbvdtnia-single-query.html
+++ b/archive/2021/results/aufbvdtnia-single-query.html

AUFBVDTNIA (Single Query Track)

Competition results for the AUFBVDTNIA logic in the Single Query Track.

[Winners: none declared. Result tables for cvc5 and cvc5 - fixedn, neither of which solved any benchmark; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/aufbvfp-single-query.html b/archive/2021/results/aufbvfp-single-query.html
index c3b1b7d3..a2c994db 100644
--- a/archive/2021/results/aufbvfp-single-query.html
+++ b/archive/2021/results/aufbvfp-single-query.html

AUFBVFP (Single Query Track)

Competition results for the AUFBVFP logic in the Single Query Track.

[Winners: Sequential cvc5, Parallel cvc5, SAT none, UNSAT cvc5, 24s cvc5. Ranked result tables for cvc5, cvc5 - fixedn, and UltimateEliminator+MathSAT; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/aufdtlia-single-query.html b/archive/2021/results/aufdtlia-single-query.html
index 4fa3af1c..458e9616 100644
--- a/archive/2021/results/aufdtlia-single-query.html
+++ b/archive/2021/results/aufdtlia-single-query.html

AUFDTLIA (Single Query Track)

Competition results for the AUFDTLIA logic in the Single Query Track.

[Winners: cvc5 in all five categories. Ranked result tables for 2020-CVC4n, cvc5 - fixedn, cvc5, 2018-CVC4n, Vampire, Vampire - fixedn, and 2020-Vampiren; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/aufdtlira-cloud.html b/archive/2021/results/aufdtlira-cloud.html
index 5f777283..bd9a5ebe 100644
--- a/archive/2021/results/aufdtlira-cloud.html
+++ b/archive/2021/results/aufdtlira-cloud.html

AUFDTLIRA (Cloud Track)

Competition results for the AUFDTLIRA logic in the Cloud Track.

[Ranked result tables for cvc5-gg and Vampire; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/aufdtlira-parallel.html b/archive/2021/results/aufdtlira-parallel.html
index 1097a3ba..afb45252 100644
--- a/archive/2021/results/aufdtlira-parallel.html
+++ b/archive/2021/results/aufdtlira-parallel.html

AUFDTLIRA (Parallel Track)

Competition results for the AUFDTLIRA logic in the Parallel Track.

[Ranked result tables for cvc5-gg and Vampire; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/aufdtlira-single-query.html b/archive/2021/results/aufdtlira-single-query.html
index 3e6e5966..e78f88e1 100644
--- a/archive/2021/results/aufdtlira-single-query.html
+++ b/archive/2021/results/aufdtlira-single-query.html

AUFDTLIRA (Single Query Track)

Competition results for the AUFDTLIRA logic in the Single Query Track.

[Winners: Sequential cvc5, Parallel cvc5, SAT none, UNSAT cvc5, 24s cvc5. Ranked result tables for cvc5 - fixedn, cvc5, 2020-CVC4n, 2020-Vampiren, Vampire - fixedn, and Vampire; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/aufdtlira-unsat-core.html b/archive/2021/results/aufdtlira-unsat-core.html
index 72d807b3..180083c4 100644
--- a/archive/2021/results/aufdtlira-unsat-core.html
+++ b/archive/2021/results/aufdtlira-unsat-core.html

AUFDTLIRA (Unsat Core Track)

Competition results for the AUFDTLIRA logic in the Unsat Core Track.

[Winners: Sequential cvc5-uc, Parallel cvc5-uc. Ranked result tables for 2020-CVC4-ucn, cvc5-uc, and Vampire; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/aufdtnira-cloud.html b/archive/2021/results/aufdtnira-cloud.html
index 68807f90..b8180bfb 100644
--- a/archive/2021/results/aufdtnira-cloud.html
+++ b/archive/2021/results/aufdtnira-cloud.html

AUFDTNIRA (Cloud Track)

Competition results for the AUFDTNIRA logic in the Cloud Track.

[Ranked result tables for Vampire and cvc5-gg; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/aufdtnira-parallel.html b/archive/2021/results/aufdtnira-parallel.html
index 637a99b9..cef95b44 100644
--- a/archive/2021/results/aufdtnira-parallel.html
+++ b/archive/2021/results/aufdtnira-parallel.html

AUFDTNIRA (Parallel Track)

Competition results for the AUFDTNIRA logic in the Parallel Track.

[Ranked result tables for Vampire and cvc5-gg; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/aufdtnira-single-query.html b/archive/2021/results/aufdtnira-single-query.html
index e944953e..af92bfae 100644
--- a/archive/2021/results/aufdtnira-single-query.html
+++ b/archive/2021/results/aufdtnira-single-query.html

AUFDTNIRA (Single Query Track)

Competition results for the AUFDTNIRA logic in the Single Query Track.

[Winners: Sequential cvc5, Parallel Vampire, SAT none, UNSAT Vampire, 24s cvc5. Ranked result tables for cvc5, cvc5 - fixedn, 2020-CVC4n, 2020-Vampiren, Vampire - fixedn, and Vampire; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/aufdtnira-unsat-core.html b/archive/2021/results/aufdtnira-unsat-core.html
index 83798513..15436970 100644
--- a/archive/2021/results/aufdtnira-unsat-core.html
+++ b/archive/2021/results/aufdtnira-unsat-core.html

AUFDTNIRA (Unsat Core Track)

Competition results for the AUFDTNIRA logic in the Unsat Core Track.

[Winners: Sequential cvc5-uc, Parallel cvc5-uc. Ranked result tables for cvc5-uc, 2020-CVC4-ucn, and Vampire; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/auffpdtlira-single-query.html b/archive/2021/results/auffpdtlira-single-query.html
index 081560eb..01ffca4b 100644
--- a/archive/2021/results/auffpdtlira-single-query.html
+++ b/archive/2021/results/auffpdtlira-single-query.html

AUFFPDTLIRA (Single Query Track)

Competition results for the AUFFPDTLIRA logic in the Single Query Track.

[Winners: Sequential cvc5, Parallel cvc5, SAT none, UNSAT cvc5, 24s cvc5. Ranked result tables for 2020-CVC4n, cvc5, and cvc5 - fixedn; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/auffpdtlira-unsat-core.html b/archive/2021/results/auffpdtlira-unsat-core.html
index 96e1e1ed..df9fd38c 100644
--- a/archive/2021/results/auffpdtlira-unsat-core.html
+++ b/archive/2021/results/auffpdtlira-unsat-core.html

AUFFPDTLIRA (Unsat Core Track)

Competition results for the AUFFPDTLIRA logic in the Unsat Core Track.

[Winners: Sequential cvc5-uc, Parallel cvc5-uc. Ranked result tables for 2020-CVC4-ucn and cvc5-uc; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/auffpdtnira-single-query.html b/archive/2021/results/auffpdtnira-single-query.html
index c2ccb31d..26024d4d 100644
--- a/archive/2021/results/auffpdtnira-single-query.html
+++ b/archive/2021/results/auffpdtnira-single-query.html

AUFFPDTNIRA (Single Query Track)

Competition results for the AUFFPDTNIRA logic in the Single Query Track.

[Winners: Sequential cvc5, Parallel cvc5, SAT none, UNSAT cvc5, 24s cvc5. Ranked result tables for cvc5 and cvc5 - fixedn; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/auflia-cloud.html b/archive/2021/results/auflia-cloud.html
index 25c2e728..61a1f3d4 100644
--- a/archive/2021/results/auflia-cloud.html
+++ b/archive/2021/results/auflia-cloud.html

AUFLIA (Cloud Track)

Competition results for the AUFLIA logic in the Cloud Track.

[Ranked result tables for Vampire and cvc5-gg; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/auflia-parallel.html b/archive/2021/results/auflia-parallel.html
index ba10b033..1a1ac1b3 100644
--- a/archive/2021/results/auflia-parallel.html
+++ b/archive/2021/results/auflia-parallel.html

AUFLIA (Parallel Track)

Competition results for the AUFLIA logic in the Parallel Track.

[Ranked result tables for Vampire and cvc5-gg; numeric score columns garbled in extraction.]
diff --git a/archive/2021/results/auflia-single-query.html b/archive/2021/results/auflia-single-query.html
index 8615164b..9862043b 100644
--- a/archive/2021/results/auflia-single-query.html
+++ b/archive/2021/results/auflia-single-query.html

AUFLIA (Single Query Track)

Competition results for the AUFLIA logic in the Single Query Track.

[Winners: Sequential Vampire, Parallel Vampire, SAT cvc5, UNSAT Vampire, 24s Vampire. Ranked result tables for 2020-CVC4n, Vampire, Vampire - fixedn, 2020-Vampiren, cvc5, cvc5 - fixedn, z3n, 2020-z3n, 2018-CVC4n, veriT, SMTInterpol, and UltimateEliminator+MathSAT; numeric score columns garbled in extraction.]
    - + - diff --git a/archive/2021/results/auflia-unsat-core.html b/archive/2021/results/auflia-unsat-core.html index 2ee862f2..71431983 100644 --- a/archive/2021/results/auflia-unsat-core.html +++ b/archive/2021/results/auflia-unsat-core.html @@ -35,7 +35,7 @@

[AUFLIA (Unsat Core Track): same formatting-only hunks; results tables unchanged.]

diff --git a/archive/2021/results/auflira-cloud.html b/archive/2021/results/auflira-cloud.html
index 47ca6d51..51ae1184 100644
--- a/archive/2021/results/auflira-cloud.html
+++ b/archive/2021/results/auflira-cloud.html

[AUFLIRA (Cloud Track): same formatting-only hunks; results tables unchanged.]

diff --git a/archive/2021/results/auflira-parallel.html b/archive/2021/results/auflira-parallel.html
index a312cfdb..bfbd2f15 100644
--- a/archive/2021/results/auflira-parallel.html
+++ b/archive/2021/results/auflira-parallel.html

[AUFLIRA (Parallel Track): same formatting-only hunks; results tables unchanged.]

diff --git a/archive/2021/results/auflira-single-query.html b/archive/2021/results/auflira-single-query.html
index 4bd8eda2..1a818b19 100644
--- a/archive/2021/results/auflira-single-query.html
+++ b/archive/2021/results/auflira-single-query.html

[AUFLIRA (Single Query Track): same formatting-only hunks; results tables unchanged.]

diff --git a/archive/2021/results/auflira-unsat-core.html b/archive/2021/results/auflira-unsat-core.html
index 72d223d8..22ff4677 100644
--- a/archive/2021/results/auflira-unsat-core.html
+++ b/archive/2021/results/auflira-unsat-core.html

[AUFLIRA (Unsat Core Track): same formatting-only hunks; results tables unchanged.]

diff --git a/archive/2021/results/aufnia-cloud.html b/archive/2021/results/aufnia-cloud.html
index 5d4bdc29..49fa7d7b 100644
--- a/archive/2021/results/aufnia-cloud.html
+++ b/archive/2021/results/aufnia-cloud.html

[AUFNIA (Cloud Track): same formatting-only hunks; results tables unchanged.]

diff --git a/archive/2021/results/aufnia-parallel.html b/archive/2021/results/aufnia-parallel.html
index 7139a495..81ea1af4 100644
--- a/archive/2021/results/aufnia-parallel.html
+++ b/archive/2021/results/aufnia-parallel.html

[AUFNIA (Parallel Track): same formatting-only hunks; results tables unchanged.]

diff --git a/archive/2021/results/aufnia-single-query.html b/archive/2021/results/aufnia-single-query.html
index fa855215..e3d0b2bb 100644
--- a/archive/2021/results/aufnia-single-query.html
+++ b/archive/2021/results/aufnia-single-query.html

[AUFNIA (Single Query Track): same formatting-only hunks; results tables unchanged.]

diff --git a/archive/2021/results/aufnia-unsat-core.html b/archive/2021/results/aufnia-unsat-core.html
index b3332737..b3baf72f 100644
--- a/archive/2021/results/aufnia-unsat-core.html
+++ b/archive/2021/results/aufnia-unsat-core.html

[AUFNIA (Unsat Core Track): same formatting-only hunks; results tables unchanged.]

diff --git a/archive/2021/results/aufnira-cloud.html b/archive/2021/results/aufnira-cloud.html
index be0a829f..f297900e 100644
--- a/archive/2021/results/aufnira-cloud.html
+++ b/archive/2021/results/aufnira-cloud.html

[AUFNIRA (Cloud Track): same formatting-only hunks; results tables unchanged.]

diff --git a/archive/2021/results/aufnira-incremental.html b/archive/2021/results/aufnira-incremental.html
index dc29d8c5..ff4514e4 100644
--- a/archive/2021/results/aufnira-incremental.html
+++ b/archive/2021/results/aufnira-incremental.html

[AUFNIRA (Incremental Track): same formatting-only hunks; results tables unchanged.]

diff --git a/archive/2021/results/aufnira-parallel.html b/archive/2021/results/aufnira-parallel.html
index b9f46acc..fb0b2b26 100644
--- a/archive/2021/results/aufnira-parallel.html
+++ b/archive/2021/results/aufnira-parallel.html

[AUFNIRA (Parallel Track): same formatting-only hunks; results tables unchanged.]

diff --git a/archive/2021/results/aufnira-single-query.html b/archive/2021/results/aufnira-single-query.html
index 48e6d584..96a2a528 100644
--- a/archive/2021/results/aufnira-single-query.html
+++ b/archive/2021/results/aufnira-single-query.html

[AUFNIRA (Single Query Track): same formatting-only hunks; results tables unchanged.]

diff --git a/archive/2021/results/aufnira-unsat-core.html b/archive/2021/results/aufnira-unsat-core.html
index 6c57c6b9..d15d1d5e 100644
--- a/archive/2021/results/aufnira-unsat-core.html
+++ b/archive/2021/results/aufnira-unsat-core.html

[AUFNIRA (Unsat Core Track): same formatting-only hunks; results tables unchanged.]

diff --git a/archive/2021/results/biggest-lead-cloud.html b/archive/2021/results/biggest-lead-cloud.html
index 261058c8..1dd8b5db 100644
--- a/archive/2021/results/biggest-lead-cloud.html
+++ b/archive/2021/results/biggest-lead-cloud.html

[Biggest Lead (Cloud Track): same formatting-only hunks; winner and lead-ratio tables unchanged.]

diff --git a/archive/2021/results/biggest-lead-incremental.html b/archive/2021/results/biggest-lead-incremental.html
index fc3daadf..19d8f0fe 100644
--- a/archive/2021/results/biggest-lead-incremental.html
+++ b/archive/2021/results/biggest-lead-incremental.html

[Biggest Lead (Incremental Track): same formatting-only hunks; winner and lead-ratio tables unchanged.]

diff --git a/archive/2021/results/biggest-lead-model-validation.html b/archive/2021/results/biggest-lead-model-validation.html
index 4450a528..96bec7cb 100644
--- a/archive/2021/results/biggest-lead-model-validation.html
+++ b/archive/2021/results/biggest-lead-model-validation.html

[Biggest Lead (Model Validation Track): same formatting-only hunks; winner and lead-ratio tables unchanged.]

diff --git a/archive/2021/results/biggest-lead-parallel.html b/archive/2021/results/biggest-lead-parallel.html
index 1b69e611..c0bc045b 100644
--- a/archive/2021/results/biggest-lead-parallel.html
+++ b/archive/2021/results/biggest-lead-parallel.html

[Biggest Lead (Parallel Track): same formatting-only hunks; winner and lead-ratio tables unchanged.]

diff --git a/archive/2021/results/biggest-lead-single-query.html b/archive/2021/results/biggest-lead-single-query.html
index 1893d721..b9149765 100644
--- a/archive/2021/results/biggest-lead-single-query.html
+++ b/archive/2021/results/biggest-lead-single-query.html

[Biggest Lead (Single Query Track): same formatting-only hunks; winner and lead-ratio tables (sequential, parallel, SAT, UNSAT, and 24s performance) unchanged.]

diff --git a/archive/2021/results/biggest-lead-unsat-core.html b/archive/2021/results/biggest-lead-unsat-core.html
index c318d653..4905454c 100644
--- a/archive/2021/results/biggest-lead-unsat-core.html
+++ b/archive/2021/results/biggest-lead-unsat-core.html

[Biggest Lead (Unsat Core Track): same formatting-only hunks; winner and lead-ratio tables unchanged.]

diff --git a/archive/2021/results/bitvec-cloud.html b/archive/2021/results/bitvec-cloud.html
index 61693e11..1f4f97e6 100644
--- a/archive/2021/results/bitvec-cloud.html
+++ b/archive/2021/results/bitvec-cloud.html

[Bitvec (Cloud Track): same formatting-only hunks; results tables unchanged.]

diff --git a/archive/2021/results/bitvec-incremental.html b/archive/2021/results/bitvec-incremental.html
index d18b8f90..80a411e4 100644
--- a/archive/2021/results/bitvec-incremental.html
+++ b/archive/2021/results/bitvec-incremental.html

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2021 Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    Bitvec (Incremental Track)

    Competition results for the Bitvec - + division - + in the Incremental Track.

    @@ -104,8 +104,8 @@

    Bitvec (Incremental Track)

    Parallel Performance cvc5-inc - - + + @@ -124,7 +124,7 @@

    Bitvec (Incremental Track)

    - + 2019-Z3n 0 371957600.5747599.681166106 @@ -133,7 +133,7 @@

    Bitvec (Incremental Track)

    - + z3n 0 370346442.7886441.705182205 @@ -142,7 +142,7 @@

    Bitvec (Incremental Track)

    - + cvc5-inc 0 358499349.4049347.949300707 @@ -151,7 +151,7 @@

    Bitvec (Incremental Track)

    - + UltimateEliminator+MathSAT 0 189121547.8211391.6691994401 @@ -175,7 +175,6 @@

    Bitvec (Incremental Track)

diff --git a/archive/2021/results/bitvec-parallel.html b/archive/2021/results/bitvec-parallel.html
index ec3701fa..785e9800 100644
--- a/archive/2021/results/bitvec-parallel.html
+++ b/archive/2021/results/bitvec-parallel.html
[hunks garbled by text extraction: markup-only edits to the "Bitvec (Parallel Track)" results page]
diff --git a/archive/2021/results/bitvec-single-query.html b/archive/2021/results/bitvec-single-query.html
index 209c72ad..fa7c91d7 100644
--- a/archive/2021/results/bitvec-single-query.html
+++ b/archive/2021/results/bitvec-single-query.html
[hunks garbled by text extraction: markup-only edits to the "Bitvec (Single Query Track)" results page]
diff --git a/archive/2021/results/bitvec-unsat-core.html b/archive/2021/results/bitvec-unsat-core.html
index c7208279..4465f610 100644
--- a/archive/2021/results/bitvec-unsat-core.html
+++ b/archive/2021/results/bitvec-unsat-core.html
[hunks garbled by text extraction: markup-only edits to the "Bitvec (Unsat Core Track)" results page]
diff --git a/archive/2021/results/bv-cloud.html b/archive/2021/results/bv-cloud.html
index e5b36458..3f364331 100644
--- a/archive/2021/results/bv-cloud.html
+++ b/archive/2021/results/bv-cloud.html
[hunks garbled by text extraction: markup-only edits to the "BV (Cloud Track)" results page]
diff --git a/archive/2021/results/bv-incremental.html b/archive/2021/results/bv-incremental.html
index f18d0103..7365a1bb 100644
--- a/archive/2021/results/bv-incremental.html
+++ b/archive/2021/results/bv-incremental.html
[hunks garbled by text extraction: markup-only edits to the "BV (Incremental Track)" results page]
diff --git a/archive/2021/results/bv-parallel.html b/archive/2021/results/bv-parallel.html
index 87e03501..bd499147 100644
--- a/archive/2021/results/bv-parallel.html
+++ b/archive/2021/results/bv-parallel.html
[hunks garbled by text extraction: markup-only edits to the "BV (Parallel Track)" results page]
diff --git a/archive/2021/results/bv-single-query.html b/archive/2021/results/bv-single-query.html
index 32d96dda..dd68b33b 100644
--- a/archive/2021/results/bv-single-query.html
+++ b/archive/2021/results/bv-single-query.html
[hunks garbled by text extraction: markup-only edits to the "BV (Single Query Track)" results page]
diff --git a/archive/2021/results/bv-unsat-core.html b/archive/2021/results/bv-unsat-core.html
index 54a65579..705a008f 100644
--- a/archive/2021/results/bv-unsat-core.html
+++ b/archive/2021/results/bv-unsat-core.html
[hunks garbled by text extraction: markup-only edits to the "BV (Unsat Core Track)" results page]
diff --git a/archive/2021/results/bvfp-incremental.html b/archive/2021/results/bvfp-incremental.html
index 55b1997f..39735559 100644
--- a/archive/2021/results/bvfp-incremental.html
+++ b/archive/2021/results/bvfp-incremental.html
[hunks garbled by text extraction: markup-only edits to the "BVFP (Incremental Track)" results page]
diff --git a/archive/2021/results/bvfp-single-query.html b/archive/2021/results/bvfp-single-query.html
index 95ebae03..18da05c9 100644
--- a/archive/2021/results/bvfp-single-query.html
+++ b/archive/2021/results/bvfp-single-query.html
[hunks garbled by text extraction: markup-only edits to the "BVFP (Single Query Track)" results page]
diff --git a/archive/2021/results/bvfplra-single-query.html b/archive/2021/results/bvfplra-single-query.html
index 59c4a3f6..0d177ad5 100644
--- a/archive/2021/results/bvfplra-single-query.html
+++ b/archive/2021/results/bvfplra-single-query.html
[hunks garbled by text extraction: markup-only edits to the "BVFPLRA (Single Query Track)" results page]
diff --git a/archive/2021/results/bvfplra-unsat-core.html b/archive/2021/results/bvfplra-unsat-core.html
index eac6f593..8b1e16ec 100644
--- a/archive/2021/results/bvfplra-unsat-core.html
+++ b/archive/2021/results/bvfplra-unsat-core.html
[hunks garbled by text extraction: markup-only edits to the "BVFPLRA (Unsat Core Track)" results page]
diff --git a/archive/2021/results/equality-cloud.html b/archive/2021/results/equality-cloud.html
index 37775a2c..e45e0232 100644
--- a/archive/2021/results/equality-cloud.html
+++ b/archive/2021/results/equality-cloud.html
[hunks garbled by text extraction: markup-only edits to the "Equality (Cloud Track)" results page]
diff --git a/archive/2021/results/equality-incremental.html b/archive/2021/results/equality-incremental.html
index 61b1eff6..43678ce2 100644
--- a/archive/2021/results/equality-incremental.html
+++ b/archive/2021/results/equality-incremental.html
[hunks garbled by text extraction: markup-only edits to the "Equality (Incremental Track)" results page]
diff --git a/archive/2021/results/equality-lineararith-cloud.html b/archive/2021/results/equality-lineararith-cloud.html
index 70c9b780..89c0ebe4 100644
--- a/archive/2021/results/equality-lineararith-cloud.html
+++ b/archive/2021/results/equality-lineararith-cloud.html
[hunks garbled by text extraction: markup-only edits to the "Equality+LinearArith (Cloud Track)" results page]
diff --git a/archive/2021/results/equality-lineararith-incremental.html b/archive/2021/results/equality-lineararith-incremental.html
index 891ffa85..6ba0e9cf 100644
--- a/archive/2021/results/equality-lineararith-incremental.html
+++ b/archive/2021/results/equality-lineararith-incremental.html
[hunks garbled by text extraction: markup-only edits to the "Equality+LinearArith (Incremental Track)" results page]
diff --git a/archive/2021/results/equality-lineararith-parallel.html b/archive/2021/results/equality-lineararith-parallel.html
index 97f45b16..c1acb805 100644
--- a/archive/2021/results/equality-lineararith-parallel.html
+++ b/archive/2021/results/equality-lineararith-parallel.html
[hunks garbled by text extraction: markup-only edits to the "Equality+LinearArith (Parallel Track)" results page]
diff --git a/archive/2021/results/equality-lineararith-single-query.html b/archive/2021/results/equality-lineararith-single-query.html
index 20b7371a..91dc7c1a 100644
--- a/archive/2021/results/equality-lineararith-single-query.html
+++ b/archive/2021/results/equality-lineararith-single-query.html
[hunks garbled by text extraction: markup-only edits to the "Equality+LinearArith (Single Query Track)" results page]
diff --git a/archive/2021/results/equality-lineararith-unsat-core.html b/archive/2021/results/equality-lineararith-unsat-core.html
index 9f848121..b9d13ccd 100644
--- a/archive/2021/results/equality-lineararith-unsat-core.html
+++ b/archive/2021/results/equality-lineararith-unsat-core.html
[hunks garbled by text extraction: markup-only edits to the "Equality+LinearArith (Unsat Core Track)" results page]
diff --git a/archive/2021/results/equality-machinearith-cloud.html b/archive/2021/results/equality-machinearith-cloud.html
index e5fc4ea4..4d163f48 100644
--- a/archive/2021/results/equality-machinearith-cloud.html
+++ b/archive/2021/results/equality-machinearith-cloud.html
[hunks garbled by text extraction: markup-only edits to the "Equality+MachineArith (Cloud Track)" results page]
diff --git a/archive/2021/results/equality-machinearith-parallel.html b/archive/2021/results/equality-machinearith-parallel.html
index ca9635c0..6331ab0c 100644
--- a/archive/2021/results/equality-machinearith-parallel.html
+++ b/archive/2021/results/equality-machinearith-parallel.html
[hunks garbled by text extraction: markup-only edits to the "Equality+MachineArith (Parallel Track)" results page]
diff --git a/archive/2021/results/equality-machinearith-single-query.html b/archive/2021/results/equality-machinearith-single-query.html
index c13e1762..97ff73d3 100644
--- a/archive/2021/results/equality-machinearith-single-query.html
+++ b/archive/2021/results/equality-machinearith-single-query.html
[hunks garbled by text extraction: markup-only edits to the "Equality+MachineArith (Single Query Track)" results page]
diff --git a/archive/2021/results/equality-machinearith-unsat-core.html b/archive/2021/results/equality-machinearith-unsat-core.html
index 4669ca85..f08111b7 100644
--- a/archive/2021/results/equality-machinearith-unsat-core.html
+++ b/archive/2021/results/equality-machinearith-unsat-core.html
[hunks garbled by text extraction: markup-only edits to the "Equality+MachineArith (Unsat Core Track)" results page]
diff --git a/archive/2021/results/equality-nonlineararith-cloud.html b/archive/2021/results/equality-nonlineararith-cloud.html
index 022476f2..f271b01e 100644
--- a/archive/2021/results/equality-nonlineararith-cloud.html
+++ b/archive/2021/results/equality-nonlineararith-cloud.html
[hunks garbled by text extraction: markup-only edits to the "Equality+NonLinearArith (Cloud Track)" results page]
    - + - diff --git a/archive/2021/results/equality-nonlineararith-incremental.html b/archive/2021/results/equality-nonlineararith-incremental.html index 4c141d3a..d2e83e2e 100644 --- a/archive/2021/results/equality-nonlineararith-incremental.html +++ b/archive/2021/results/equality-nonlineararith-incremental.html @@ -35,7 +35,7 @@

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "Equality+NonLinearArith (Incremental Track)" results page, plus a one-line deletion in the final hunk; table rows for 2020-z3n, z3n, cvc5-inc, 2020-CVC4-incn, SMTInterpol, UltimateEliminator+MathSAT, and 2019-CVC4-incn appear only as unchanged diff context]

diff --git a/archive/2021/results/equality-nonlineararith-parallel.html b/archive/2021/results/equality-nonlineararith-parallel.html
index e1feb262..eefee2aa 100644
--- a/archive/2021/results/equality-nonlineararith-parallel.html
+++ b/archive/2021/results/equality-nonlineararith-parallel.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "Equality+NonLinearArith (Parallel Track)" results page, plus a one-line deletion in the final hunk; table rows for cvc5-gg, Vampire, and Par4n appear only as unchanged diff context]

diff --git a/archive/2021/results/equality-nonlineararith-single-query.html b/archive/2021/results/equality-nonlineararith-single-query.html
index 7ee4fff2..b2890e8a 100644
--- a/archive/2021/results/equality-nonlineararith-single-query.html
+++ b/archive/2021/results/equality-nonlineararith-single-query.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "Equality+NonLinearArith (Single Query Track)" results page, plus a one-line deletion in the final hunk; table rows for cvc5, cvc5 - fixedn, 2020-CVC4n, Vampire, Vampire - fixedn, 2020-Vampiren, z3n, iProver, UltimateEliminator+MathSAT, SMTInterpol, and 2019-Par4n appear only as unchanged diff context]

diff --git a/archive/2021/results/equality-nonlineararith-unsat-core.html b/archive/2021/results/equality-nonlineararith-unsat-core.html
index 4de50679..76bbc2d8 100644
--- a/archive/2021/results/equality-nonlineararith-unsat-core.html
+++ b/archive/2021/results/equality-nonlineararith-unsat-core.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "Equality+NonLinearArith (Unsat Core Track)" results page, plus a one-line deletion in the final hunk; table rows for 2020-CVC4-ucn, cvc5-uc, Vampire, z3n, UltimateEliminator+MathSAT, and SMTInterpol appear only as unchanged diff context]

diff --git a/archive/2021/results/equality-parallel.html b/archive/2021/results/equality-parallel.html
index 1260072d..ec8f283b 100644
--- a/archive/2021/results/equality-parallel.html
+++ b/archive/2021/results/equality-parallel.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "Equality (Parallel Track)" results page, plus a one-line deletion in the final hunk; table rows for Vampire, cvc5-gg, and Par4n appear only as unchanged diff context]

diff --git a/archive/2021/results/equality-single-query.html b/archive/2021/results/equality-single-query.html
index 3f5ca662..35223240 100644
--- a/archive/2021/results/equality-single-query.html
+++ b/archive/2021/results/equality-single-query.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "Equality (Single Query Track)" results page, plus a one-line deletion in the final hunk; table rows for 2020-CVC4n, Vampire, 2020-Vampiren, Vampire - fixedn, cvc5, iProver, iProver - fixedn, iProver - fixed2n, veriT, z3n, Yices2, SMTInterpol, and UltimateEliminator+MathSAT appear only as unchanged diff context]

diff --git a/archive/2021/results/equality-unsat-core.html b/archive/2021/results/equality-unsat-core.html
index 9684b4ea..d04ac6f5 100644
--- a/archive/2021/results/equality-unsat-core.html
+++ b/archive/2021/results/equality-unsat-core.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "Equality (Unsat Core Track)" results page, plus a one-line deletion in the final hunk; table rows for 2020-CVC4-ucn, cvc5-uc, z3n, SMTInterpol, SMTInterpol-remus, UltimateEliminator+MathSAT, and Vampire appear only as unchanged diff context]

diff --git a/archive/2021/results/fp-single-query.html b/archive/2021/results/fp-single-query.html
index 76685ce0..a894f8e2 100644
--- a/archive/2021/results/fp-single-query.html
+++ b/archive/2021/results/fp-single-query.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "FP (Single Query Track)" results page, plus a one-line deletion in the final hunk; table rows for cvc5, z3n, 2020-CVC4n, UltimateEliminator+MathSAT, and 2019-Z3n appear only as unchanged diff context]

diff --git a/archive/2021/results/fparith-incremental.html b/archive/2021/results/fparith-incremental.html
index 3b92c18e..2d2c5122 100644
--- a/archive/2021/results/fparith-incremental.html
+++ b/archive/2021/results/fparith-incremental.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "FPArith (Incremental Track)" results page, plus a one-line deletion in the final hunk; table rows for 2019-CVC4-incn, cvc5-inc, and UltimateEliminator+MathSAT appear only as unchanged diff context]

diff --git a/archive/2021/results/fparith-single-query.html b/archive/2021/results/fparith-single-query.html
index 1dc20afc..da12d1b9 100644
--- a/archive/2021/results/fparith-single-query.html
+++ b/archive/2021/results/fparith-single-query.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "FPArith (Single Query Track)" results page, plus a one-line deletion in the final hunk; table rows for cvc5, 2020-CVC4n, z3n, UltimateEliminator+MathSAT, and 2019-Z3n appear only as unchanged diff context]

diff --git a/archive/2021/results/fparith-unsat-core.html b/archive/2021/results/fparith-unsat-core.html
index 221d0b66..ebedb9f5 100644
--- a/archive/2021/results/fparith-unsat-core.html
+++ b/archive/2021/results/fparith-unsat-core.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "FPArith (Unsat Core Track)" results page, plus a one-line deletion in the final hunk; table rows for 2020-CVC4-ucn, cvc5-uc, and UltimateEliminator+MathSAT appear only as unchanged diff context]

diff --git a/archive/2021/results/fplra-single-query.html b/archive/2021/results/fplra-single-query.html
index 41dcfa71..0f56128e 100644
--- a/archive/2021/results/fplra-single-query.html
+++ b/archive/2021/results/fplra-single-query.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "FPLRA (Single Query Track)" results page, plus a one-line deletion in the final hunk; table rows for cvc5, 2020-CVC4n, and UltimateEliminator+MathSAT appear only as unchanged diff context]

diff --git a/archive/2021/results/largest-contribution-cloud.html b/archive/2021/results/largest-contribution-cloud.html
index 3314c4c3..93d338ec 100644
--- a/archive/2021/results/largest-contribution-cloud.html
+++ b/archive/2021/results/largest-contribution-cloud.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "Largest Contribution" (Cloud Track) results page, plus a one-line deletion in the final hunk; Parallel/SAT/UNSAT/24s Performance rows for SMTS cube-and-conquer and SMTS portfolio appear only as unchanged diff context]

diff --git a/archive/2021/results/largest-contribution-incremental.html b/archive/2021/results/largest-contribution-incremental.html
index 7368ad54..6e8bb05c 100644
--- a/archive/2021/results/largest-contribution-incremental.html
+++ b/archive/2021/results/largest-contribution-incremental.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "Largest Contribution" (Incremental Track) results page, plus a one-line deletion in the final hunk; Parallel Performance rows for cvc5-inc, Yices2 incremental, SMTInterpol, STP, and OpenSMT appear only as unchanged diff context]

diff --git a/archive/2021/results/largest-contribution-model-validation.html b/archive/2021/results/largest-contribution-model-validation.html
index dbbe51ce..6830249f 100644
--- a/archive/2021/results/largest-contribution-model-validation.html
+++ b/archive/2021/results/largest-contribution-model-validation.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "Largest Contribution" (Model Validation Track) results page, plus a one-line deletion in the final hunk; Sequential/Parallel Performance rows for cvc5-mv, Bitwuzla, SMTInterpol, and Yices2 model-validation appear only as unchanged diff context]

diff --git a/archive/2021/results/largest-contribution-single-query.html b/archive/2021/results/largest-contribution-single-query.html
index 8ee7eb7b..e3b33c9c 100644
--- a/archive/2021/results/largest-contribution-single-query.html
+++ b/archive/2021/results/largest-contribution-single-query.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "Largest Contribution" (Single Query Track) results page, plus a one-line deletion in the final hunk; Sequential/Parallel/SAT/UNSAT/24s Performance rows for Vampire, iProver, cvc5, Yices2, Yices2-QS, Bitwuzla, OpenSMT, UltimateEliminator+MathSAT, SMTInterpol, and veriT appear only as unchanged diff context]

diff --git a/archive/2021/results/largest-contribution-unsat-core.html b/archive/2021/results/largest-contribution-unsat-core.html
index 22e3835e..0c412285 100644
--- a/archive/2021/results/largest-contribution-unsat-core.html
+++ b/archive/2021/results/largest-contribution-unsat-core.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "Largest Contribution" (Unsat Core Track) results page, plus a one-line deletion in the final hunk; Sequential/Parallel Performance rows for cvc5-uc and Yices2 appear only as unchanged diff context]

diff --git a/archive/2021/results/lia-incremental.html b/archive/2021/results/lia-incremental.html
index ba9e5660..d19c3fb1 100644
--- a/archive/2021/results/lia-incremental.html
+++ b/archive/2021/results/lia-incremental.html

    [hunks garbled in extraction — apparently formatting-only, line-for-line edits to the "LIA (Incremental Track)" results page, plus a one-line deletion in the final hunk; table rows for 2020-z3n, z3n, cvc5-inc, 2020-CVC4-incn, UltimateEliminator+MathSAT, and SMTInterpol appear only as unchanged diff context]

diff --git a/archive/2021/results/lia-single-query.html b/archive/2021/results/lia-single-query.html
index da398c1d..c129480e 100644
--- a/archive/2021/results/lia-single-query.html
+++ b/archive/2021/results/lia-single-query.html

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2021 Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    LIA (Single Query Track)

    Competition results for the LIA - + logic - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    LIA (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) cvc5cvc5cvc5 - - + + cvc5 - - + + cvc5 - + @@ -131,7 +131,7 @@

    LIA (Single Query Track)

    - + 2020-z3n 0 191 @@ -142,7 +142,7 @@

    LIA (Single Query Track)

    - + 2020-CVC4n 0 191 @@ -153,7 +153,7 @@

    LIA (Single Query Track)

    - + cvc5 - fixedn 0 191 @@ -164,7 +164,7 @@

    LIA (Single Query Track)

    - + cvc5 0 191 @@ -175,7 +175,7 @@

    LIA (Single Query Track)

    - + z3n 0 187 @@ -186,7 +186,7 @@

    LIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 147 @@ -197,7 +197,7 @@

    LIA (Single Query Track)

    - + Vampire 0 108 @@ -208,7 +208,7 @@

    LIA (Single Query Track)

    - + Vampire - fixedn 0 106 @@ -219,7 +219,7 @@

    LIA (Single Query Track)

    - + SMTInterpol 0 62 @@ -230,7 +230,7 @@

    LIA (Single Query Track)

    - + veriT 0 49 @@ -241,7 +241,7 @@

    LIA (Single Query Track)

    - + iProver 0 49 @@ -263,7 +263,7 @@

    LIA (Single Query Track)

    - + 2020-z3n 0 1918.8288.8481918310800 @@ -272,7 +272,7 @@

    LIA (Single Query Track)

    - + 2020-CVC4n 0 19122.72222.6691918310800 @@ -281,7 +281,7 @@

    LIA (Single Query Track)

    - + cvc5 0 19167.17664.521918310800 @@ -290,7 +290,7 @@

    LIA (Single Query Track)

    - + cvc5 - fixedn 0 19165.53365.51918310800 @@ -299,7 +299,7 @@

    LIA (Single Query Track)

    - + z3n 0 1874806.1654806.2041878310444 @@ -308,7 +308,7 @@

    LIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 14761011.45159483.947147431044444 @@ -317,7 +317,7 @@

    LIA (Single Query Track)

    - + Vampire 0 108100933.93100021.87410841048383 @@ -326,7 +326,7 @@

    LIA (Single Query Track)

    - + Vampire - fixedn 0 106105418.727100250.42110621048583 @@ -335,7 +335,7 @@

    LIA (Single Query Track)

    - + SMTInterpol 0 6263791.37463238.2236245812945 @@ -344,7 +344,7 @@

    LIA (Single Query Track)

    - + veriT 0 4916178.35716171.6734904914213 @@ -353,7 +353,7 @@

    LIA (Single Query Track)

    - + iProver 0 49162611.653162186.14249049142135 @@ -373,7 +373,7 @@

    LIA (Single Query Track)

    - + z3n 0 833.2813.3098383001084 @@ -382,7 +382,7 @@

    LIA (Single Query Track)

    - + 2020-z3n 0 834.4044.4198383001080 @@ -391,7 +391,7 @@

    LIA (Single Query Track)

    - + 2020-CVC4n 0 8318.1418.1148383001080 @@ -400,7 +400,7 @@

    LIA (Single Query Track)

    - + cvc5 0 8358.78656.188383001080 @@ -409,7 +409,7 @@

    LIA (Single Query Track)

    - + cvc5 - fixedn 0 8356.99957.0148383001080 @@ -418,7 +418,7 @@

    LIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 4353730.53852844.234434304010844 @@ -427,7 +427,7 @@

    LIA (Single Query Track)

    - + SMTInterpol 0 451235.57750901.0744407910845 @@ -436,7 +436,7 @@

    LIA (Single Query Track)

    - + Vampire 0 494807.89194802.6614407910883 @@ -445,7 +445,7 @@

    LIA (Single Query Track)

    - + Vampire - fixedn 0 2100800.5497192.1412208110883 @@ -454,7 +454,7 @@

    LIA (Single Query Track)

    - + veriT 0 0101.19694.3770008310813 @@ -463,7 +463,7 @@

    LIA (Single Query Track)

    - + iProver 0 097200.97497201.5400083108135 @@ -483,7 +483,7 @@

    LIA (Single Query Track)

    - + 2020-z3n 0 1084.4244.42910801080830 @@ -492,7 +492,7 @@

    LIA (Single Query Track)

    - + 2020-CVC4n 0 1084.5814.55510801080830 @@ -501,7 +501,7 @@

    LIA (Single Query Track)

    - + cvc5 0 1088.398.3410801080830 @@ -510,7 +510,7 @@

    LIA (Single Query Track)

    - + cvc5 - fixedn 0 1088.5348.48610801080830 @@ -519,7 +519,7 @@

    LIA (Single Query Track)

    - + Vampire - fixedn 0 1044618.1873058.28104010448383 @@ -528,7 +528,7 @@

    LIA (Single Query Track)

    - + z3n 0 1044802.8834802.89410401044834 @@ -537,7 +537,7 @@

    LIA (Single Query Track)

    - + Vampire 0 1046126.0395219.213104010448383 @@ -546,7 +546,7 @@

    LIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 1047280.9136639.713104010448344 @@ -555,7 +555,7 @@

    LIA (Single Query Track)

    - + SMTInterpol 0 5812555.79712337.1558058508345 @@ -564,7 +564,7 @@

    LIA (Single Query Track)

    - + veriT 0 4916077.16116077.29649049598313 @@ -573,7 +573,7 @@

    LIA (Single Query Track)

    - + iProver 0 4965410.67964984.602490495983135 @@ -593,7 +593,7 @@

    LIA (Single Query Track)

    - + 2020-z3n 0 1918.8288.8481918310800 @@ -602,7 +602,7 @@

    LIA (Single Query Track)

    - + 2020-CVC4n 0 19122.72222.6691918310800 @@ -611,7 +611,7 @@

    LIA (Single Query Track)

    - + cvc5 0 19167.17664.521918310800 @@ -620,7 +620,7 @@

    LIA (Single Query Track)

    - + cvc5 - fixedn 0 19165.53365.51918310800 @@ -629,7 +629,7 @@

    LIA (Single Query Track)

    - + z3n 0 187102.165102.2041878310444 @@ -638,7 +638,7 @@

    LIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 1132788.3032357.20611321927878 @@ -647,7 +647,7 @@

    LIA (Single Query Track)

    - + Vampire 0 1042283.522177.60610441008787 @@ -656,7 +656,7 @@

    LIA (Single Query Track)

    - + Vampire - fixedn 0 1002311.2772225.1571002989190 @@ -665,7 +665,7 @@

    LIA (Single Query Track)

    - + SMTInterpol 0 622032.2221885.7136245812973 @@ -674,7 +674,7 @@

    LIA (Single Query Track)

    - + veriT 0 49479.917473.1864904914215 @@ -683,7 +683,7 @@

    LIA (Single Query Track)

    - + iProver 0 483728.8553412.83448048143136 @@ -707,7 +707,6 @@

    LIA (Single Query Track)

diff --git a/archive/2021/results/lia-unsat-core.html b/archive/2021/results/lia-unsat-core.html
index f334f50a..e7fe97ee 100644
--- a/archive/2021/results/lia-unsat-core.html
+++ b/archive/2021/results/lia-unsat-core.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "LIA (Unsat Core Track)" sequential and parallel scoring tables; final hunk deletes one line]
diff --git a/archive/2021/results/lra-cloud.html b/archive/2021/results/lra-cloud.html
index 5ab238d8..591d0299 100644
--- a/archive/2021/results/lra-cloud.html
+++ b/archive/2021/results/lra-cloud.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "LRA (Cloud Track)" scoring tables; final hunk deletes one line]
diff --git a/archive/2021/results/lra-incremental.html b/archive/2021/results/lra-incremental.html
index 9fba0b82..5f67a704 100644
--- a/archive/2021/results/lra-incremental.html
+++ b/archive/2021/results/lra-incremental.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "LRA (Incremental Track)" parallel-performance table; final hunk deletes one line]
diff --git a/archive/2021/results/lra-parallel.html b/archive/2021/results/lra-parallel.html
index cdd2f0b9..7af55253 100644
--- a/archive/2021/results/lra-parallel.html
+++ b/archive/2021/results/lra-parallel.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "LRA (Parallel Track)" scoring tables; final hunk deletes one line]
diff --git a/archive/2021/results/lra-single-query.html b/archive/2021/results/lra-single-query.html
index 13f38e7b..916af61e 100644
--- a/archive/2021/results/lra-single-query.html
+++ b/archive/2021/results/lra-single-query.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "LRA (Single Query Track)" scoring tables (sequential, parallel, SAT, UNSAT, 24s); final hunk deletes one line]
diff --git a/archive/2021/results/nia-cloud.html b/archive/2021/results/nia-cloud.html
index 8e1e21dc..ea5696d7 100644
--- a/archive/2021/results/nia-cloud.html
+++ b/archive/2021/results/nia-cloud.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "NIA (Cloud Track)" scoring tables; final hunk deletes one line]
diff --git a/archive/2021/results/nia-parallel.html b/archive/2021/results/nia-parallel.html
index 87e70cac..f73321a7 100644
--- a/archive/2021/results/nia-parallel.html
+++ b/archive/2021/results/nia-parallel.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "NIA (Parallel Track)" scoring tables; final hunk deletes one line]
diff --git a/archive/2021/results/nia-single-query.html b/archive/2021/results/nia-single-query.html
index aee54142..4dc70690 100644
--- a/archive/2021/results/nia-single-query.html
+++ b/archive/2021/results/nia-single-query.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "NIA (Single Query Track)" scoring tables (sequential, parallel, SAT, UNSAT, 24s); final hunk deletes one line]
diff --git a/archive/2021/results/nia-unsat-core.html b/archive/2021/results/nia-unsat-core.html
index 2bed18dc..34ad3a51 100644
--- a/archive/2021/results/nia-unsat-core.html
+++ b/archive/2021/results/nia-unsat-core.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "NIA (Unsat Core Track)" sequential and parallel scoring tables; final hunk deletes one line]
diff --git a/archive/2021/results/nra-cloud.html b/archive/2021/results/nra-cloud.html
index 0e3d9265..01028b49 100644
--- a/archive/2021/results/nra-cloud.html
+++ b/archive/2021/results/nra-cloud.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "NRA (Cloud Track)" scoring tables; final hunk deletes one line]
diff --git a/archive/2021/results/nra-parallel.html b/archive/2021/results/nra-parallel.html
index 764343e8..02106230 100644
--- a/archive/2021/results/nra-parallel.html
+++ b/archive/2021/results/nra-parallel.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "NRA (Parallel Track)" scoring tables; final hunk deletes one line]
diff --git a/archive/2021/results/nra-single-query.html b/archive/2021/results/nra-single-query.html
index e4d43b44..46797109 100644
--- a/archive/2021/results/nra-single-query.html
+++ b/archive/2021/results/nra-single-query.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "NRA (Single Query Track)" scoring tables (sequential, parallel, SAT, UNSAT, 24s); final hunk deletes one line]
diff --git a/archive/2021/results/qf-abv-cloud.html b/archive/2021/results/qf-abv-cloud.html
index 34e1f700..321135aa 100644
--- a/archive/2021/results/qf-abv-cloud.html
+++ b/archive/2021/results/qf-abv-cloud.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "QF_ABV (Cloud Track)" scoring tables; final hunk deletes one line]
diff --git a/archive/2021/results/qf-abv-incremental.html b/archive/2021/results/qf-abv-incremental.html
index 52d70202..317efc5d 100644
--- a/archive/2021/results/qf-abv-incremental.html
+++ b/archive/2021/results/qf-abv-incremental.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "QF_ABV (Incremental Track)" parallel-performance table; final hunk deletes one line]
diff --git a/archive/2021/results/qf-abv-parallel.html b/archive/2021/results/qf-abv-parallel.html
index 206c4a4a..c5ed786a 100644
--- a/archive/2021/results/qf-abv-parallel.html
+++ b/archive/2021/results/qf-abv-parallel.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "QF_ABV (Parallel Track)" scoring tables; final hunk deletes one line]
diff --git a/archive/2021/results/qf-abv-single-query.html b/archive/2021/results/qf-abv-single-query.html
index 4185c3a7..eb186d00 100644
--- a/archive/2021/results/qf-abv-single-query.html
+++ b/archive/2021/results/qf-abv-single-query.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "QF_ABV (Single Query Track)" scoring tables (sequential, parallel, SAT, UNSAT, 24s); final hunk deletes one line]
diff --git a/archive/2021/results/qf-abv-unsat-core.html b/archive/2021/results/qf-abv-unsat-core.html
index 5b43e74a..ed988e5a 100644
--- a/archive/2021/results/qf-abv-unsat-core.html
+++ b/archive/2021/results/qf-abv-unsat-core.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "QF_ABV (Unsat Core Track)" sequential and parallel scoring tables; final hunk deletes one line]
diff --git a/archive/2021/results/qf-abvfp-incremental.html b/archive/2021/results/qf-abvfp-incremental.html
index 266dd904..440c2a53 100644
--- a/archive/2021/results/qf-abvfp-incremental.html
+++ b/archive/2021/results/qf-abvfp-incremental.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "QF_ABVFP (Incremental Track)" parallel-performance table; final hunk deletes one line]
diff --git a/archive/2021/results/qf-abvfp-single-query.html b/archive/2021/results/qf-abvfp-single-query.html
index 541dc65d..4a611de3 100644
--- a/archive/2021/results/qf-abvfp-single-query.html
+++ b/archive/2021/results/qf-abvfp-single-query.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "QF_ABVFP (Single Query Track)" scoring tables (sequential, parallel, SAT, UNSAT, 24s); final hunk deletes one line]
diff --git a/archive/2021/results/qf-abvfp-unsat-core.html b/archive/2021/results/qf-abvfp-unsat-core.html
index 6ccc0c21..738b8dc9 100644
--- a/archive/2021/results/qf-abvfp-unsat-core.html
+++ b/archive/2021/results/qf-abvfp-unsat-core.html
[hunks illegible after HTML tag stripping: one-line markup edits across the banner, navigation, and "QF_ABVFP (Unsat Core Track)" sequential and parallel scoring tables; final hunk deletes one line]
diff --git a/archive/2021/results/qf-abvfplra-single-query.html b/archive/2021/results/qf-abvfplra-single-query.html
index bee8fae7..e99a3ef7 100644
--- a/archive/2021/results/qf-abvfplra-single-query.html
+++ b/archive/2021/results/qf-abvfplra-single-query.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_ABVFPLRA (Single Query Track), winner (all performance categories): COLIBRI]
diff --git a/archive/2021/results/qf-alia-incremental.html b/archive/2021/results/qf-alia-incremental.html
index 5326c2ee..8f677182 100644
--- a/archive/2021/results/qf-alia-incremental.html
+++ b/archive/2021/results/qf-alia-incremental.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_ALIA (Incremental Track), winner (parallel performance): cvc5-inc]
diff --git a/archive/2021/results/qf-alia-single-query.html b/archive/2021/results/qf-alia-single-query.html
index 95728252..eb3ea267 100644
--- a/archive/2021/results/qf-alia-single-query.html
+++ b/archive/2021/results/qf-alia-single-query.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_ALIA (Single Query Track), winner (all performance categories): Yices2]
diff --git a/archive/2021/results/qf-alia-unsat-core.html b/archive/2021/results/qf-alia-unsat-core.html
index 2726abf6..1238cad9 100644
--- a/archive/2021/results/qf-alia-unsat-core.html
+++ b/archive/2021/results/qf-alia-unsat-core.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_ALIA (Unsat Core Track), winner (sequential and parallel): SMTInterpol]
diff --git a/archive/2021/results/qf-ania-incremental.html b/archive/2021/results/qf-ania-incremental.html
index 6b58ea43..a280b0b8 100644
--- a/archive/2021/results/qf-ania-incremental.html
+++ b/archive/2021/results/qf-ania-incremental.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_ANIA (Incremental Track), winner (parallel performance): SMTInterpol]
diff --git a/archive/2021/results/qf-ania-single-query.html b/archive/2021/results/qf-ania-single-query.html
index 3e9cdcb1..9e16794f 100644
--- a/archive/2021/results/qf-ania-single-query.html
+++ b/archive/2021/results/qf-ania-single-query.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_ANIA (Single Query Track), winner (all performance categories): cvc5]
diff --git a/archive/2021/results/qf-ania-unsat-core.html b/archive/2021/results/qf-ania-unsat-core.html
index 67f872e0..e5358dee 100644
--- a/archive/2021/results/qf-ania-unsat-core.html
+++ b/archive/2021/results/qf-ania-unsat-core.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_ANIA (Unsat Core Track), winner (sequential and parallel): cvc5-uc]
diff --git a/archive/2021/results/qf-aufbv-incremental.html b/archive/2021/results/qf-aufbv-incremental.html
index 0107e6cd..40f546de 100644
--- a/archive/2021/results/qf-aufbv-incremental.html
+++ b/archive/2021/results/qf-aufbv-incremental.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_AUFBV (Incremental Track), winner (parallel performance): Yices2 incremental]
diff --git a/archive/2021/results/qf-aufbv-single-query.html b/archive/2021/results/qf-aufbv-single-query.html
index b01dd53a..7e43a851 100644
--- a/archive/2021/results/qf-aufbv-single-query.html
+++ b/archive/2021/results/qf-aufbv-single-query.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_AUFBV (Single Query Track), winner (all performance categories): Yices2]
diff --git a/archive/2021/results/qf-aufbv-unsat-core.html b/archive/2021/results/qf-aufbv-unsat-core.html
index eb8e6791..2bd618b1 100644
--- a/archive/2021/results/qf-aufbv-unsat-core.html
+++ b/archive/2021/results/qf-aufbv-unsat-core.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_AUFBV (Unsat Core Track), winner (sequential and parallel): Yices2]
diff --git a/archive/2021/results/qf-aufbvfp-single-query.html b/archive/2021/results/qf-aufbvfp-single-query.html
index 67bf8221..2a10d1d2 100644
--- a/archive/2021/results/qf-aufbvfp-single-query.html
+++ b/archive/2021/results/qf-aufbvfp-single-query.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_AUFBVFP (Single Query Track), winner: Bitwuzla (no winner in the UNSAT category)]
diff --git a/archive/2021/results/qf-auflia-incremental.html b/archive/2021/results/qf-auflia-incremental.html
index 08eaf3a9..5aeba605 100644
--- a/archive/2021/results/qf-auflia-incremental.html
+++ b/archive/2021/results/qf-auflia-incremental.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_AUFLIA (Incremental Track), winner (parallel performance): cvc5-inc]
diff --git a/archive/2021/results/qf-auflia-single-query.html b/archive/2021/results/qf-auflia-single-query.html
index 834d756c..343a2dde 100644
--- a/archive/2021/results/qf-auflia-single-query.html
+++ b/archive/2021/results/qf-auflia-single-query.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_AUFLIA (Single Query Track), winner (all performance categories): Yices2]
diff --git a/archive/2021/results/qf-auflia-unsat-core.html b/archive/2021/results/qf-auflia-unsat-core.html
index 30d793ae..467a588c 100644
--- a/archive/2021/results/qf-auflia-unsat-core.html
+++ b/archive/2021/results/qf-auflia-unsat-core.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_AUFLIA (Unsat Core Track), winner (sequential and parallel): cvc5-uc]
diff --git a/archive/2021/results/qf-aufnia-single-query.html b/archive/2021/results/qf-aufnia-single-query.html
index e7d8327b..fca7c26b 100644
--- a/archive/2021/results/qf-aufnia-single-query.html
+++ b/archive/2021/results/qf-aufnia-single-query.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_AUFNIA (Single Query Track), winner (all performance categories): cvc5]
diff --git a/archive/2021/results/qf-aufnia-unsat-core.html b/archive/2021/results/qf-aufnia-unsat-core.html
index 28b41ccf..476540a3 100644
--- a/archive/2021/results/qf-aufnia-unsat-core.html
+++ b/archive/2021/results/qf-aufnia-unsat-core.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_AUFNIA (Unsat Core Track), winner (sequential and parallel): cvc5-uc]
diff --git a/archive/2021/results/qf-ax-single-query.html b/archive/2021/results/qf-ax-single-query.html
index 695348c8..26309e55 100644
--- a/archive/2021/results/qf-ax-single-query.html
+++ b/archive/2021/results/qf-ax-single-query.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_AX (Single Query Track), winner (all performance categories): Yices2]
diff --git a/archive/2021/results/qf-ax-unsat-core.html b/archive/2021/results/qf-ax-unsat-core.html
index b3cb38bb..93421d42 100644
--- a/archive/2021/results/qf-ax-unsat-core.html
+++ b/archive/2021/results/qf-ax-unsat-core.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_AX (Unsat Core Track), winner (sequential and parallel): Yices2]
diff --git a/archive/2021/results/qf-bitvec-cloud.html b/archive/2021/results/qf-bitvec-cloud.html
index 3a87e502..4f2e2b55 100644
--- a/archive/2021/results/qf-bitvec-cloud.html
+++ b/archive/2021/results/qf-bitvec-cloud.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_Bitvec division (Cloud Track), entrants: STP-CMS-Cloud, Par4, cvc5-gg]
diff --git a/archive/2021/results/qf-bitvec-incremental.html b/archive/2021/results/qf-bitvec-incremental.html
index 550b3200..02dcf145 100644
--- a/archive/2021/results/qf-bitvec-incremental.html
+++ b/archive/2021/results/qf-bitvec-incremental.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_Bitvec division (Incremental Track), winner (parallel performance): STP]
diff --git a/archive/2021/results/qf-bitvec-model-validation.html b/archive/2021/results/qf-bitvec-model-validation.html
index 941d6d45..dc6ce61b 100644
--- a/archive/2021/results/qf-bitvec-model-validation.html
+++ b/archive/2021/results/qf-bitvec-model-validation.html
[formatting-only hunks (one-line markup changes; result tables unchanged) — QF_Bitvec division (Model Validation Track), winner (sequential and parallel): Bitwuzla]
    - + - diff --git a/archive/2021/results/qf-bitvec-parallel.html b/archive/2021/results/qf-bitvec-parallel.html index 4ecd72b6..b758818e 100644 --- a/archive/2021/results/qf-bitvec-parallel.html +++ b/archive/2021/results/qf-bitvec-parallel.html @@ -35,7 +35,7 @@

[hunks garbled in extraction: paired -/+ markup edits across the QF_Bitvec (Parallel Track) results page (banner, navigation, division link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-bitvec-single-query.html b/archive/2021/results/qf-bitvec-single-query.html
index 3413c78d..fa157e30 100644
--- a/archive/2021/results/qf-bitvec-single-query.html
+++ b/archive/2021/results/qf-bitvec-single-query.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_Bitvec (Single Query Track) results page (banner, navigation, division link, and the sequential/parallel/SAT/UNSAT/24s score tables); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-bitvec-unsat-core.html b/archive/2021/results/qf-bitvec-unsat-core.html
index ee993699..fea1c74a 100644
--- a/archive/2021/results/qf-bitvec-unsat-core.html
+++ b/archive/2021/results/qf-bitvec-unsat-core.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_Bitvec (Unsat Core Track) results page (banner, navigation, division link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-bv-cloud.html b/archive/2021/results/qf-bv-cloud.html
index b184495c..f4c51a06 100644
--- a/archive/2021/results/qf-bv-cloud.html
+++ b/archive/2021/results/qf-bv-cloud.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_BV (Cloud Track) results page (banner, navigation, logic link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-bv-incremental.html b/archive/2021/results/qf-bv-incremental.html
index 3b13dcbd..7cbb35d9 100644
--- a/archive/2021/results/qf-bv-incremental.html
+++ b/archive/2021/results/qf-bv-incremental.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_BV (Incremental Track) results page (banner, navigation, logic link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-bv-model-validation.html b/archive/2021/results/qf-bv-model-validation.html
index eea8ccc2..e4ba903c 100644
--- a/archive/2021/results/qf-bv-model-validation.html
+++ b/archive/2021/results/qf-bv-model-validation.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_BV (Model Validation Track) results page (banner, navigation, logic link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-bv-parallel.html b/archive/2021/results/qf-bv-parallel.html
index 87487dbe..9f15fa9b 100644
--- a/archive/2021/results/qf-bv-parallel.html
+++ b/archive/2021/results/qf-bv-parallel.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_BV (Parallel Track) results page (banner, navigation, logic link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-bv-single-query.html b/archive/2021/results/qf-bv-single-query.html
index 2c06ec31..00a0f339 100644
--- a/archive/2021/results/qf-bv-single-query.html
+++ b/archive/2021/results/qf-bv-single-query.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_BV (Single Query Track) results page (banner, navigation, logic link, and the sequential/parallel/SAT/UNSAT/24s score tables); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-bv-unsat-core.html b/archive/2021/results/qf-bv-unsat-core.html
index 5a07a7b6..b53ad996 100644
--- a/archive/2021/results/qf-bv-unsat-core.html
+++ b/archive/2021/results/qf-bv-unsat-core.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_BV (Unsat Core Track) results page (banner, navigation, logic link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-bvfp-cloud.html b/archive/2021/results/qf-bvfp-cloud.html
index 107d92b8..f477fedf 100644
--- a/archive/2021/results/qf-bvfp-cloud.html
+++ b/archive/2021/results/qf-bvfp-cloud.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_BVFP (Cloud Track) results page (banner, navigation, logic link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-bvfp-incremental.html b/archive/2021/results/qf-bvfp-incremental.html
index b78b807b..f3688871 100644
--- a/archive/2021/results/qf-bvfp-incremental.html
+++ b/archive/2021/results/qf-bvfp-incremental.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_BVFP (Incremental Track) results page (banner, navigation, logic link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-bvfp-parallel.html b/archive/2021/results/qf-bvfp-parallel.html
index 5392583b..4d00f707 100644
--- a/archive/2021/results/qf-bvfp-parallel.html
+++ b/archive/2021/results/qf-bvfp-parallel.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_BVFP (Parallel Track) results page (banner, navigation, logic link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-bvfp-single-query.html b/archive/2021/results/qf-bvfp-single-query.html
index de3a0ef3..a71ccf71 100644
--- a/archive/2021/results/qf-bvfp-single-query.html
+++ b/archive/2021/results/qf-bvfp-single-query.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_BVFP (Single Query Track) results page (banner, navigation, logic link, and the sequential/parallel/SAT/UNSAT/24s score tables); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-bvfp-unsat-core.html b/archive/2021/results/qf-bvfp-unsat-core.html
index a345b0d8..0178a165 100644
--- a/archive/2021/results/qf-bvfp-unsat-core.html
+++ b/archive/2021/results/qf-bvfp-unsat-core.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_BVFP (Unsat Core Track) results page (banner, navigation, logic link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-bvfplra-single-query.html b/archive/2021/results/qf-bvfplra-single-query.html
index 289e1c93..fbf023d8 100644
--- a/archive/2021/results/qf-bvfplra-single-query.html
+++ b/archive/2021/results/qf-bvfplra-single-query.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_BVFPLRA (Single Query Track) results page (banner, navigation, logic link, and the sequential/parallel/SAT/UNSAT/24s score tables); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-bvfplra-unsat-core.html b/archive/2021/results/qf-bvfplra-unsat-core.html
index c8dd3b7c..00cdc4a3 100644
--- a/archive/2021/results/qf-bvfplra-unsat-core.html
+++ b/archive/2021/results/qf-bvfplra-unsat-core.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_BVFPLRA (Unsat Core Track) results page (banner, navigation, logic link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-dt-single-query.html b/archive/2021/results/qf-dt-single-query.html
index 18d80189..06ea18b7 100644
--- a/archive/2021/results/qf-dt-single-query.html
+++ b/archive/2021/results/qf-dt-single-query.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_DT (Single Query Track) results page (banner, navigation, logic link, and the sequential/parallel/SAT/UNSAT/24s score tables); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-dt-unsat-core.html b/archive/2021/results/qf-dt-unsat-core.html
index 101141dd..26b5fcc3 100644
--- a/archive/2021/results/qf-dt-unsat-core.html
+++ b/archive/2021/results/qf-dt-unsat-core.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_DT (Unsat Core Track) results page (banner, navigation, logic link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-equality-bitvec-cloud.html b/archive/2021/results/qf-equality-bitvec-cloud.html
index 5d32e3d1..131d042d 100644
--- a/archive/2021/results/qf-equality-bitvec-cloud.html
+++ b/archive/2021/results/qf-equality-bitvec-cloud.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_Equality+Bitvec (Cloud Track) results page (banner, navigation, division link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-equality-bitvec-incremental.html b/archive/2021/results/qf-equality-bitvec-incremental.html
index 99e5d90a..549af2d2 100644
--- a/archive/2021/results/qf-equality-bitvec-incremental.html
+++ b/archive/2021/results/qf-equality-bitvec-incremental.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_Equality+Bitvec (Incremental Track) results page (banner, navigation, division link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-equality-bitvec-model-validation.html b/archive/2021/results/qf-equality-bitvec-model-validation.html
index 2192b1f6..3a483d1c 100644
--- a/archive/2021/results/qf-equality-bitvec-model-validation.html
+++ b/archive/2021/results/qf-equality-bitvec-model-validation.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_Equality+Bitvec (Model Validation Track) results page (banner, navigation, division link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-equality-bitvec-parallel.html b/archive/2021/results/qf-equality-bitvec-parallel.html
index de2e38b9..65f79576 100644
--- a/archive/2021/results/qf-equality-bitvec-parallel.html
+++ b/archive/2021/results/qf-equality-bitvec-parallel.html

[hunks garbled in extraction: paired -/+ markup edits across the QF_Equality+Bitvec (Parallel Track) results page (banner, navigation, division link, solver rows); the changed HTML tags are not recoverable]
diff --git a/archive/2021/results/qf-equality-bitvec-single-query.html b/archive/2021/results/qf-equality-bitvec-single-query.html
index 5718a8cf..a357604b 100644
--- a/archive/2021/results/qf-equality-bitvec-single-query.html
+++ b/archive/2021/results/qf-equality-bitvec-single-query.html

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    QF_Equality+Bitvec (Single Query Track)

    Competition results for the QF_Equality+Bitvec division in the Single Query Track.

    Winners — Sequential, Parallel, SAT, UNSAT, and 24s Performance: Bitwuzla in all five categories.

    Ranking (solver / error score / score); the trailing "n" on some names reproduces the
    site's superscript marker for non-competing entries:

      2020-Bitwuzlan      0  3168
      Bitwuzla            0  3168
      2020-Yices2n        0  3132
      2020-Yices2-fixedn  0  3132
      Yices2              0  3128
      MathSAT5n           0  3067
      z3n                 0  3047
      cvc5                0  3042

    [Detailed sequential, parallel, SAT, UNSAT, and 24s tables: per-solver CPU time,
    wall-clock time, and solved/unsolved counts.]
diff --git a/archive/2021/results/qf-equality-bitvec-unsat-core.html b/archive/2021/results/qf-equality-bitvec-unsat-core.html
index e8d365d2..77be8db0 100644
--- a/archive/2021/results/qf-equality-bitvec-unsat-core.html
+++ b/archive/2021/results/qf-equality-bitvec-unsat-core.html

    QF_Equality+Bitvec (Unsat Core Track)

    Competition results for the QF_Equality+Bitvec division in the Unsat Core Track.

    Winners — Sequential and Parallel Performance: Bitwuzla.

    Ranking (solver / error score / score):

      Bitwuzla      0  1150402
      cvc5-uc       0  1070957
      z3n           0   772472
      2020-Yices2n  0   771830
      Yices2        0   771830
      2020-z3n      0   767877
      MathSAT5n     0       35

    [Detailed sequential and parallel tables: per-solver CPU time, wall-clock time, and timeout counts.]
diff --git a/archive/2021/results/qf-equality-incremental.html b/archive/2021/results/qf-equality-incremental.html
index 859b6759..ec16b944 100644
--- a/archive/2021/results/qf-equality-incremental.html
+++ b/archive/2021/results/qf-equality-incremental.html

    QF_Equality (Incremental Track)

    Competition results for the QF_Equality division in the Incremental Track.

    Winner — Parallel Performance: cvc5-inc.

    Ranking (solver / error score / score); the three-way tie at 14209 is broken by CPU time
    (z3n 270.456 s, cvc5-inc 771.867 s, SMTInterpol 3593.442 s):

      z3n                            0  14209
      cvc5-inc                       0  14209
      SMTInterpol                    0  14209
      2019-Yices 2.6.2 Incrementaln  0    810
      Yices2 incremental             0    810
      OpenSMT                        0    809
      MathSAT5n                      0    762

    [Full table: per-solver CPU time, wall-clock time, and unsolved counts.]
diff --git a/archive/2021/results/qf-equality-lineararith-incremental.html b/archive/2021/results/qf-equality-lineararith-incremental.html
index 72bba6de..b38b57af 100644
--- a/archive/2021/results/qf-equality-lineararith-incremental.html
+++ b/archive/2021/results/qf-equality-lineararith-incremental.html

    QF_Equality+LinearArith (Incremental Track)

    Competition results for the QF_Equality+LinearArith division in the Incremental Track.

    Winner — Parallel Performance: cvc5-inc.

    Ranking (solver / error score / score):

      2020-z3n                   0  5673796
      z3n                        0  5547234
      cvc5-inc                   0  4918703
      SMTInterpol                0  4816541
      2018-Yices (incremental)n  0  4699864
      Yices2 incremental         0  4079606
      MathSAT5n                  0  3050382
      2018-Z3 (incremental)n     0  1020534

    [Full table: per-solver CPU time, wall-clock time, and unsolved counts.]
diff --git a/archive/2021/results/qf-equality-lineararith-model-validation.html b/archive/2021/results/qf-equality-lineararith-model-validation.html
index b28288a3..1a0c5592 100644
--- a/archive/2021/results/qf-equality-lineararith-model-validation.html
+++ b/archive/2021/results/qf-equality-lineararith-model-validation.html

    QF_Equality+LinearArith (Model Validation Track)

    Competition results for the QF_Equality+LinearArith division in the Model Validation Track.

    Ranking (solver / error score / score):

      SMTInterpol              0  881
      cvc5-mv                  0  850
      Yices2 model-validation  0  823
      z3-mvn                   0  715
      MathSAT5n                0  553

    [Full table: per-solver CPU time, wall-clock time, and timeout counts.]
diff --git a/archive/2021/results/qf-equality-lineararith-single-query.html b/archive/2021/results/qf-equality-lineararith-single-query.html
index cf8c0a88..ef7676ea 100644
--- a/archive/2021/results/qf-equality-lineararith-single-query.html
+++ b/archive/2021/results/qf-equality-lineararith-single-query.html

    QF_Equality+LinearArith (Single Query Track)

    Competition results for the QF_Equality+LinearArith division in the Single Query Track.

    Winners — Sequential, Parallel, SAT, and 24s Performance: SMTInterpol; UNSAT Performance: cvc5.

    Ranking (solver / error score / score):

      SMTInterpol        0  1902
      cvc5 - fixedn      0  1887
      cvc5               0  1884
      z3n                0  1751
      Yices2             0  1750
      MathSAT5n          0  1735
      veriT              0  1064
      2019-Yices 2.6.2n  0   912
      2019-SMTInterpoln  0   539
      mc2                0   522
      2018-Yicesn        0   300

    [Detailed sequential, parallel, SAT, UNSAT, and 24s tables: per-solver CPU time,
    wall-clock time, and solved/unsolved counts.]
diff --git a/archive/2021/results/qf-equality-lineararith-unsat-core.html b/archive/2021/results/qf-equality-lineararith-unsat-core.html
index a9ee03bb..e277d9f9 100644
--- a/archive/2021/results/qf-equality-lineararith-unsat-core.html
+++ b/archive/2021/results/qf-equality-lineararith-unsat-core.html

    QF_Equality+LinearArith (Unsat Core Track)

    Competition results for the QF_Equality+LinearArith division in the Unsat Core Track.

    Winners — Sequential and Parallel Performance: Yices2.

    Ranking (solver / error score / score):

      MathSAT5n            0  858722
      2020-Yices2-fixedn   0  814234
      Yices2               0  814234
      2020-z3n             0  456356
      SMTInterpol          0  371701
      SMTInterpol-remus    0  365439
      z3n                  0  333851
      2020-CVC4-ucn        0  204838
      cvc5-uc              0   45192

    [Detailed sequential and parallel tables: per-solver CPU time, wall-clock time, and timeout counts.]
diff --git a/archive/2021/results/qf-equality-model-validation.html b/archive/2021/results/qf-equality-model-validation.html
index 718075c3..5007c2e9 100644
--- a/archive/2021/results/qf-equality-model-validation.html
+++ b/archive/2021/results/qf-equality-model-validation.html

    QF_Equality (Model Validation Track)

    Competition results for the QF_Equality division in the Model Validation Track.

    Ranking (solver / error score / score); the four-way tie at 1571 is broken by CPU time
    (Yices2 model-validation 73.098 s, OpenSMT 562.781 s, cvc5-mv 731.025 s, SMTInterpol 4841.795 s):

      Yices2 model-validation  0  1571
      OpenSMT                  0  1571
      cvc5-mv                  0  1571
      SMTInterpol              0  1571
      z3-mvn                   0  1555
      MathSAT5n                0   640

    [Full table: per-solver CPU time, wall-clock time, and timeout counts.]
diff --git a/archive/2021/results/qf-equality-nonlineararith-cloud.html b/archive/2021/results/qf-equality-nonlineararith-cloud.html
index 735305d5..c32943fa 100644
--- a/archive/2021/results/qf-equality-nonlineararith-cloud.html
+++ b/archive/2021/results/qf-equality-nonlineararith-cloud.html

    QF_Equality+NonLinearArith (Cloud Track)

    Competition results for the QF_Equality+NonLinearArith division in the Cloud Track.

    Two entries ran: Par4n solved 2 of the selected benchmarks (both SAT, error score 0);
    cvc5-gg solved none.

    [Overall, SAT, UNSAT, and 24s tables with per-solver wall-clock times.]
diff --git a/archive/2021/results/qf-equality-nonlineararith-incremental.html b/archive/2021/results/qf-equality-nonlineararith-incremental.html
index cc58afc9..3750ec78 100644
--- a/archive/2021/results/qf-equality-nonlineararith-incremental.html
+++ b/archive/2021/results/qf-equality-nonlineararith-incremental.html

    QF_Equality+NonLinearArith (Incremental Track)

    Competition results for the QF_Equality+NonLinearArith division in the Incremental Track.

    Winner — Parallel Performance: SMTInterpol.

    Ranking (solver / error score / score):

      z3n                 0  150310
      SMTInterpol         0  148218
      cvc5-inc            0  142017
      2020-MathSAT5n      0   96674
      MathSAT5n           0   96674
      Yices2 incremental  0   19180

    [Full table: per-solver CPU time, wall-clock time, and unsolved counts.]
diff --git a/archive/2021/results/qf-equality-nonlineararith-parallel.html b/archive/2021/results/qf-equality-nonlineararith-parallel.html
index 9a9a80c6..ddaa0a7c 100644
--- a/archive/2021/results/qf-equality-nonlineararith-parallel.html
+++ b/archive/2021/results/qf-equality-nonlineararith-parallel.html

    QF_Equality+NonLinearArith (Parallel Track)

    Competition results for the QF_Equality+NonLinearArith division in the Parallel Track.

    Two entries ran: Par4n solved 2 of the selected benchmarks (both SAT, error score 0);
    cvc5-gg solved none.

    [Overall, SAT, UNSAT, and 24s tables with per-solver wall-clock times.]
diff --git a/archive/2021/results/qf-equality-nonlineararith-single-query.html b/archive/2021/results/qf-equality-nonlineararith-single-query.html
index b5927712..491fb562 100644
--- a/archive/2021/results/qf-equality-nonlineararith-single-query.html
+++ b/archive/2021/results/qf-equality-nonlineararith-single-query.html

    QF_Equality+NonLinearArith (Single Query Track)

    Competition results for the QF_Equality+NonLinearArith division in the Single Query Track.

    Winners — Sequential, Parallel, SAT, UNSAT, and 24s Performance: cvc5 in all five categories.

    Ranking (solver / error score / score):

      2020-CVC4n             0  397
      z3n                    0  377
      2019-CVC4n             0  372
      cvc5 - fixedn          0  368
      cvc5                   0  368
      MathSAT5n              0  343
      Yices2                 0  305
      2019-Par4n             0   23
      2019-MathSAT-defaultn  0    9
      veriT+raSAT+Redlog     0    1

    [Detailed sequential, parallel, SAT, UNSAT, and 24s tables: per-solver CPU time,
    wall-clock time, and solved/unsolved counts.]
diff --git a/archive/2021/results/qf-equality-nonlineararith-unsat-core.html b/archive/2021/results/qf-equality-nonlineararith-unsat-core.html
index 492601e2..e21749db 100644
--- a/archive/2021/results/qf-equality-nonlineararith-unsat-core.html
+++ b/archive/2021/results/qf-equality-nonlineararith-unsat-core.html

    QF_Equality+NonLinearArith (Unsat Core Track)

    Competition results for the QF_Equality+NonLinearArith division in the Unsat Core Track.

    Winners — Sequential and Parallel Performance: cvc5-uc.

    Ranking (solver / error score / score):

      cvc5-uc    0  80015
      MathSAT5n  0  65084
      z3n        0  27006
      Yices2     0      0

    [Detailed sequential and parallel tables: per-solver CPU time, wall-clock time, and timeout counts.]
diff --git a/archive/2021/results/qf-equality-single-query.html b/archive/2021/results/qf-equality-single-query.html
index a53cdc44..1ab30bb4 100644
--- a/archive/2021/results/qf-equality-single-query.html
+++ b/archive/2021/results/qf-equality-single-query.html

    QF_Equality (Single Query Track)

    Competition results for the QF_Equality division in the Single Query Track.

    Winners — Sequential, Parallel, SAT, and UNSAT Performance: cvc5; 24s Performance: Yices2.

    Ranking (solver / error score / score):

      z3n                 0  4022
      cvc5                0  4009
      2020-z3n            0  3920
      2020-Yices2n        0  3822
      2020-Yices2-fixedn  0  3822
      Yices2              0  3822
      SMTInterpol         0  3817
      MathSAT5n           0  3733
      2019-Par4n          0  3521
      veriT               0  3521
      OpenSMT             0  3517

    [Detailed sequential, parallel, SAT, UNSAT, and 24s tables: per-solver CPU time,
    wall-clock time, and solved/unsolved counts.]
diff --git a/archive/2021/results/qf-equality-unsat-core.html b/archive/2021/results/qf-equality-unsat-core.html
index 904db7c5..4a549d6f 100644
--- a/archive/2021/results/qf-equality-unsat-core.html
+++ b/archive/2021/results/qf-equality-unsat-core.html

    QF_Equality (Unsat Core Track)

    Competition results for the QF_Equality division in the Unsat Core Track.

    Winners — Sequential and Parallel Performance: cvc5-uc.

    Ranking (solver / error score / score):

      z3n                       0  1143660
      cvc5-uc                   0   447703
      SMTInterpol               0   386916
      2020-Yices2-fixedn        0   276317
      Yices2                    0   276317
      2020-SMTInterpol-fixedn   0   239090
      MathSAT5n                 0   237041
      SMTInterpol-remus         0   221318

    [Detailed sequential and parallel tables: per-solver CPU time, wall-clock time, and timeout counts.]
diff --git a/archive/2021/results/qf-fp-cloud.html b/archive/2021/results/qf-fp-cloud.html
index b7183a42..f1d31c5f 100644
--- a/archive/2021/results/qf-fp-cloud.html
+++ b/archive/2021/results/qf-fp-cloud.html

    QF_FP (Cloud Track)

    Competition results for the QF_FP logic in the Cloud Track.

    Two entries ran: cvc5-gg solved 6 benchmarks (all UNSAT) with error score 0; Par4n returned
    7 SAT answers but incurred an error score of 7, placing it below cvc5-gg.

    [Overall, SAT, UNSAT, and 24s tables with per-solver wall-clock times.]
diff --git a/archive/2021/results/qf-fp-incremental.html b/archive/2021/results/qf-fp-incremental.html
index b5142945..7fbcf88c 100644
--- a/archive/2021/results/qf-fp-incremental.html
+++ b/archive/2021/results/qf-fp-incremental.html

    QF_FP (Incremental Track)

    Competition results for the QF_FP logic in the Incremental Track.

    Winner — Parallel Performance: Bitwuzla.

    All seven entries answered the 663 check-sat queries with error score 0 and nothing unsolved;
    the table orders them by runtime (CPU s / wall s):

      2020-Bitwuzla-fixedn   5.543 /  5.535
      Bitwuzla - fixedn      5.672 /  5.642
      Bitwuzla               5.725 /  5.7
      MathSAT5n             17.843 / 17.795
      cvc5-inc              20.638 / 19.945
      2020-CVC4-incn        21.581 / 21.522
      z3n                   75.962 / 74.718
diff --git a/archive/2021/results/qf-fp-parallel.html b/archive/2021/results/qf-fp-parallel.html
index b3e5f600..1d7e6f6a 100644
--- a/archive/2021/results/qf-fp-parallel.html
+++ b/archive/2021/results/qf-fp-parallel.html

    QF_FP (Parallel Track)

    Competition results for the QF_FP logic in the Parallel Track.

    Two entries ran: cvc5-gg solved 6 benchmarks (all UNSAT) with error score 0; Par4n returned
    7 answers but incurred an error score of 8, placing it below cvc5-gg.

    [Overall, SAT, UNSAT, and 24s tables with per-solver wall-clock times.]
diff --git a/archive/2021/results/qf-fp-single-query.html b/archive/2021/results/qf-fp-single-query.html
index dc37f606..ed23e731 100644
--- a/archive/2021/results/qf-fp-single-query.html
+++ b/archive/2021/results/qf-fp-single-query.html

    QF_FP (Single Query Track)

    Competition results for the QF_FP logic in the Single Query Track.

    Winners — Sequential, Parallel, SAT, UNSAT, and 24s Performance: Bitwuzla in all five categories.

    Ranking (solver / error score / score):

      2020-Bitwuzlan        0  274
      2020-Bitwuzla-fixedn  0  272
      Bitwuzla              0  266
      COLIBRI - fixedn      0  260
      2020-COLIBRIn         0  257
      cvc5                  0  257
      MathSAT5n             0  244
      2020-MathSAT5n        0  244
      2020-CVC4n            0  237
      z3n                   0  211
      COLIBRI               1  260   (its one wrong answer ranks it last despite the score)

    [Detailed sequential, parallel, SAT, UNSAT, and 24s tables: per-solver CPU time,
    wall-clock time, and solved/unsolved counts.]
diff --git a/archive/2021/results/qf-fp-unsat-core.html b/archive/2021/results/qf-fp-unsat-core.html
index 5bea0001..ccce6bd0 100644
--- a/archive/2021/results/qf-fp-unsat-core.html
+++ b/archive/2021/results/qf-fp-unsat-core.html

    QF_FP (Unsat Core Track)

    Competition results for the QF_FP logic in the Unsat Core Track.

    Winners — Sequential and Parallel Performance: Bitwuzla.

    Ranking (solver / error score / score):

      Bitwuzla              0  92
      2020-Bitwuzlan        0  90
      2020-Bitwuzla-fixedn  0  89
      cvc5-uc               0  84
      z3n                   0  25
      MathSAT5n             0   0

    [Detailed sequential and parallel tables: per-solver CPU time, wall-clock time, and timeout counts.]
diff --git a/archive/2021/results/qf-fparith-cloud.html b/archive/2021/results/qf-fparith-cloud.html
index 3ab62090..96c7b732 100644
--- a/archive/2021/results/qf-fparith-cloud.html
+++ b/archive/2021/results/qf-fparith-cloud.html

    [Results for the QF_FPArith division in the Cloud Track. Markup-level hunks touch the page header, navigation menu, and the result tables for cvc5-gg and Par4n; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-fparith-incremental.html b/archive/2021/results/qf-fparith-incremental.html
index 80398eda..a8f82b75 100644
--- a/archive/2021/results/qf-fparith-incremental.html
+++ b/archive/2021/results/qf-fparith-incremental.html

    [Results for the QF_FPArith division in the Incremental Track; winner (parallel performance): cvc5-inc. Markup-level hunks touch the page header, navigation menu, winner banner, and the result rows for Bitwuzla, Bitwuzla - fixedn, 2020-Bitwuzla-fixedn, cvc5-inc, 2020-CVC4-incn, z3n, and MathSAT5n; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-fparith-parallel.html b/archive/2021/results/qf-fparith-parallel.html
index 8625c80d..3aa9c4dc 100644
--- a/archive/2021/results/qf-fparith-parallel.html
+++ b/archive/2021/results/qf-fparith-parallel.html

    [Results for the QF_FPArith division in the Parallel Track. Markup-level hunks touch the page header, navigation menu, and the result tables for cvc5-gg and Par4n; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-fparith-single-query.html b/archive/2021/results/qf-fparith-single-query.html
index 513b6fd9..060ae954 100644
--- a/archive/2021/results/qf-fparith-single-query.html
+++ b/archive/2021/results/qf-fparith-single-query.html

    [Results for the QF_FPArith division in the Single Query Track; winner: cvc5 in all five categories (sequential, parallel, SAT, UNSAT, and 24-second performance). Ranking (sequential): cvc5 1636, Bitwuzla 1495, 2020-CVC4n 1456, MathSAT5n 1453, 2020-MathSAT5n 1452, 2020-Bitwuzlan 1375, 2020-Bitwuzla-fixedn 1373, COLIBRI - fixedn 1354, z3n 730, cvc5 - fixedn 154; with errors: 2020-COLIBRIn 1326 (3 errors), COLIBRI 1346 (8 errors). Markup-level hunks touch the page header, navigation menu, winner banner, and all per-solver result tables; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-fparith-unsat-core.html b/archive/2021/results/qf-fparith-unsat-core.html
index 61ad9b0b..c1906e25 100644
--- a/archive/2021/results/qf-fparith-unsat-core.html
+++ b/archive/2021/results/qf-fparith-unsat-core.html

    [Results for the QF_FPArith division in the Unsat Core Track; winner: Bitwuzla (sequential and parallel performance). Ranking (sequential): Bitwuzla 31314, 2020-Bitwuzlan 31005, 2020-Bitwuzla-fixedn 30990, cvc5-uc 30791, z3n 14099, MathSAT5n 333. Markup-level hunks touch the page header, navigation menu, winner banner, and result tables; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-fplra-single-query.html b/archive/2021/results/qf-fplra-single-query.html
index c2f2322c..4d1bf056 100644
--- a/archive/2021/results/qf-fplra-single-query.html
+++ b/archive/2021/results/qf-fplra-single-query.html

    [Results for the QF_FPLRA logic in the Single Query Track; winners: Bitwuzla (sequential, parallel, and SAT performance) and COLIBRI (UNSAT and 24-second performance). Ranking (sequential): Bitwuzla 54, COLIBRI - fixedn 53, COLIBRI 53, 2020-COLIBRIn 52, 2020-CVC4n 48, cvc5 47, MathSAT5n 46, 2020-MathSAT5n 46, z3n 42. Markup-level hunks touch the page header, navigation menu, winner banner, and all per-solver result tables; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-idl-cloud.html b/archive/2021/results/qf-idl-cloud.html
index 7cdf4665..1634890d 100644
--- a/archive/2021/results/qf-idl-cloud.html
+++ b/archive/2021/results/qf-idl-cloud.html

    [Results for the QF_IDL logic in the Cloud Track. Markup-level hunks touch the page header, navigation menu, and the result tables for Par4n, SMTS cube-and-conquer, SMTS portfolio, and cvc5-gg; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-idl-model-validation.html b/archive/2021/results/qf-idl-model-validation.html
index f1ab4406..fc909456 100644
--- a/archive/2021/results/qf-idl-model-validation.html
+++ b/archive/2021/results/qf-idl-model-validation.html

    [Results for the QF_IDL logic in the Model Validation Track; winner: YicesLS (sequential and parallel performance). Ranking (sequential): YicesLS 646, z3-mvn 634, 2020-z3n 629, 2020-Yices2-fixed Model Validationn 625, 2020-Yices2 Model Validationn 625, Yices2 model-validation 624, cvc5-mv 552, OpenSMT 530, SMTInterpol 437, MathSAT5n 421. Markup-level hunks touch the page header, navigation menu, winner banner, and result tables; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-idl-parallel.html b/archive/2021/results/qf-idl-parallel.html
index 7cec33ed..be9007c6 100644
--- a/archive/2021/results/qf-idl-parallel.html
+++ b/archive/2021/results/qf-idl-parallel.html

    [Results for the QF_IDL logic in the Parallel Track. Markup-level hunks touch the page header, navigation menu, and the result tables for Par4n and cvc5-gg; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-idl-single-query.html b/archive/2021/results/qf-idl-single-query.html
index b4d6e777..6c779d41 100644
--- a/archive/2021/results/qf-idl-single-query.html
+++ b/archive/2021/results/qf-idl-single-query.html

    [Results for the QF_IDL logic in the Single Query Track; winner: YicesLS in all five categories (sequential, parallel, SAT, UNSAT, and 24-second performance). Ranking (sequential): z3n 1012, YicesLS 1009, 2019-Par4n 957, Yices2 938, cvc5 886, cvc5 - fixedn 885, OpenSMT 833, OpenSMT - fixedn 831, veriT 777, MathSAT5n 714, SMTInterpol 698. Markup-level hunks touch the page header, navigation menu, winner banner, and all per-solver result tables; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-idl-unsat-core.html b/archive/2021/results/qf-idl-unsat-core.html
index 73b381e9..7e8885ce 100644
--- a/archive/2021/results/qf-idl-unsat-core.html
+++ b/archive/2021/results/qf-idl-unsat-core.html

    [Results for the QF_IDL logic in the Unsat Core Track; winner: cvc5-uc (sequential and parallel performance). Ranking (sequential): cvc5-uc 479523, SMTInterpol 456368, MathSAT5n 384734, SMTInterpol-remus 167393, Yices2 103969, 2020-Yices2-fixedn 101099, z3n 28590. Markup-level hunks touch the page header, navigation menu, winner banner, and result tables; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-lia-cloud.html b/archive/2021/results/qf-lia-cloud.html
index d0e1edf1..00604722 100644
--- a/archive/2021/results/qf-lia-cloud.html
+++ b/archive/2021/results/qf-lia-cloud.html

    [Results for the QF_LIA logic in the Cloud Track. Markup-level hunks touch the page header, navigation menu, and the result tables for SMTS portfolio, SMTS cube-and-conquer, Par4n, and cvc5-gg; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-lia-incremental.html b/archive/2021/results/qf-lia-incremental.html
index b5c5da9f..8eefc8f3 100644
--- a/archive/2021/results/qf-lia-incremental.html
+++ b/archive/2021/results/qf-lia-incremental.html

    [Results for the QF_LIA logic in the Incremental Track; winner (parallel performance): Yices2 incremental. Markup-level hunks touch the page header, navigation menu, winner banner, and the result rows for Yices2 incremental, 2020-Yices2-fixed incrementaln, z3n, MathSAT5n, SMTInterpol, cvc5-inc, and OpenSMT; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-lia-model-validation.html b/archive/2021/results/qf-lia-model-validation.html
index 1497261d..892f405d 100644
--- a/archive/2021/results/qf-lia-model-validation.html
+++ b/archive/2021/results/qf-lia-model-validation.html

    [Results for the QF_LIA logic in the Model Validation Track; winner: cvc5-mv (sequential and parallel performance). Ranking (sequential): MathSAT5n 1830, 2020-z3n 1819, cvc5-mv 1754, z3-mvn 1706, 2020-Yices2-fixed Model Validationn 1638, 2020-Yices2 Model Validationn 1638, Yices2 model-validation 1634, SMTInterpol 1604, OpenSMT 1222. Markup-level hunks touch the page header, navigation menu, winner banner, and result tables; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-lia-parallel.html b/archive/2021/results/qf-lia-parallel.html
index 1d959962..3172fd20 100644
--- a/archive/2021/results/qf-lia-parallel.html
+++ b/archive/2021/results/qf-lia-parallel.html

    [Results for the QF_LIA logic in the Parallel Track. Markup-level hunks touch the page header, navigation menu, and the result tables for Par4n and cvc5-gg; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-lia-single-query.html b/archive/2021/results/qf-lia-single-query.html
index 0367d0f4..055188dc 100644
--- a/archive/2021/results/qf-lia-single-query.html
+++ b/archive/2021/results/qf-lia-single-query.html

    [Results for the QF_LIA logic in the Single Query Track; winners: cvc5 (sequential, parallel, SAT, and UNSAT performance) and Yices2 (24-second performance). Ranking (sequential): 2019-Par4n 3224, MathSAT5n 3082, cvc5 3001, cvc5 - fixedn 3001, Yices2 2842, z3n 2829, SMTInterpol 2490, OpenSMT - fixedn 2339, veriT 1299; with errors: OpenSMT 2334 (8 errors). Markup-level hunks touch the page header, navigation menu, winner banner, and all per-solver result tables; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-lia-unsat-core.html b/archive/2021/results/qf-lia-unsat-core.html
index 05868446..c27d7ea5 100644
--- a/archive/2021/results/qf-lia-unsat-core.html
+++ b/archive/2021/results/qf-lia-unsat-core.html

    [Results for the QF_LIA logic in the Unsat Core Track; winner: Yices2 (sequential and parallel performance). Ranking (sequential): Yices2 945405, 2020-Yices2-fixedn 945405, z3n 944017, cvc5-uc 924885, MathSAT5n 924346, SMTInterpol 811034, SMTInterpol-remus 199339. Markup-level hunks touch the page header, navigation menu, winner banner, and result tables; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-linearintarith-cloud.html b/archive/2021/results/qf-linearintarith-cloud.html
index c4c97ecf..dbfa502a 100644
--- a/archive/2021/results/qf-linearintarith-cloud.html
+++ b/archive/2021/results/qf-linearintarith-cloud.html

    [Results for the QF_LinearIntArith division in the Cloud Track. Markup-level hunks touch the page header, navigation menu, and the result tables for Par4n, SMTS cube-and-conquer, SMTS portfolio, and cvc5-gg; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-linearintarith-incremental.html b/archive/2021/results/qf-linearintarith-incremental.html
index 22b0bd9a..294c11eb 100644
--- a/archive/2021/results/qf-linearintarith-incremental.html
+++ b/archive/2021/results/qf-linearintarith-incremental.html

    [Results for the QF_LinearIntArith division in the Incremental Track; winner (parallel performance): Yices2 incremental. Markup-level hunks touch the page header, navigation menu, winner banner, and the result rows for Yices2 incremental, 2020-Yices2-fixed incrementaln, z3n, MathSAT5n, SMTInterpol, cvc5-inc, and OpenSMT; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
diff --git a/archive/2021/results/qf-linearintarith-model-validation.html b/archive/2021/results/qf-linearintarith-model-validation.html
index d785573f..f6bd610e 100644
--- a/archive/2021/results/qf-linearintarith-model-validation.html
+++ b/archive/2021/results/qf-linearintarith-model-validation.html

    [Results for the QF_LinearIntArith division in the Model Validation Track; winner: cvc5-mv (sequential and parallel performance). Ranking (sequential): 2020-z3n 2449, z3-mvn 2341, cvc5-mv 2307, 2020-Yices2-fixed Model Validationn 2264, 2020-Yices2 Model Validationn 2264, Yices2 model-validation 2259, MathSAT5n 2252, SMTInterpol 2042, OpenSMT 1752, YicesLS 646. Markup-level hunks touch the page header, navigation menu, winner banner, and result tables; the displayed results are unchanged, and a final hunk deletes one line at the end of the file.]
    - + - diff --git a/archive/2021/results/qf-linearintarith-parallel.html b/archive/2021/results/qf-linearintarith-parallel.html index 63d4e67a..053dda78 100644 --- a/archive/2021/results/qf-linearintarith-parallel.html +++ b/archive/2021/results/qf-linearintarith-parallel.html @@ -35,7 +35,7 @@

    [collapsed hunks: single-line formatting fixes across the QF_LinearIntArith (Parallel Track) results page; entrants Par4 and cvc5-gg; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-linearintarith-single-query.html b/archive/2021/results/qf-linearintarith-single-query.html
index 96126341..2d595461 100644
--- a/archive/2021/results/qf-linearintarith-single-query.html
+++ b/archive/2021/results/qf-linearintarith-single-query.html

    [collapsed hunks: single-line formatting fixes across the QF_LinearIntArith (Single Query Track) results page; winners as displayed: cvc5 (sequential, parallel, SAT, UNSAT) and Yices2 (24 s); entrants 2019-Par4, cvc5, cvc5 - fixed, z3, MathSAT5, Yices2, SMTInterpol, OpenSMT - fixed, veriT, YicesLS, OpenSMT; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-linearintarith-unsat-core.html b/archive/2021/results/qf-linearintarith-unsat-core.html
index cdced092..27ffd207 100644
--- a/archive/2021/results/qf-linearintarith-unsat-core.html
+++ b/archive/2021/results/qf-linearintarith-unsat-core.html

    [collapsed hunks: single-line formatting fixes across the QF_LinearIntArith (Unsat Core Track) results page; sequential and parallel winner cvc5-uc; entrants cvc5-uc, MathSAT5, SMTInterpol, Yices2, 2020-Yices2-fixed, z3, SMTInterpol-remus; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-linearrealarith-cloud.html b/archive/2021/results/qf-linearrealarith-cloud.html
index fe475e66..d06ce68d 100644
--- a/archive/2021/results/qf-linearrealarith-cloud.html
+++ b/archive/2021/results/qf-linearrealarith-cloud.html

    [collapsed hunks: single-line formatting fixes across the QF_LinearRealArith (Cloud Track) results page; entrants SMTS portfolio, SMTS cube-and-conquer, Par4, cvc5-gg; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-linearrealarith-incremental.html b/archive/2021/results/qf-linearrealarith-incremental.html
index 13f72043..ec04f63b 100644
--- a/archive/2021/results/qf-linearrealarith-incremental.html
+++ b/archive/2021/results/qf-linearrealarith-incremental.html

    [collapsed hunks: single-line formatting fixes across the QF_LinearRealArith (Incremental Track) results page; parallel-performance winner OpenSMT; entrants 2018-MathSAT (incremental), MathSAT5, OpenSMT, Yices2 incremental, cvc5-inc, SMTInterpol, z3; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-linearrealarith-model-validation.html b/archive/2021/results/qf-linearrealarith-model-validation.html
index 6085412b..bedf1f82 100644
--- a/archive/2021/results/qf-linearrealarith-model-validation.html
+++ b/archive/2021/results/qf-linearrealarith-model-validation.html

    [collapsed hunks: single-line formatting fixes across the QF_LinearRealArith (Model Validation Track) results page; sequential and parallel winner Yices2 model-validation; entrants Yices2 model-validation, OpenSMT, z3-mv, 2020-Yices2-fixed Model Validation, cvc5-mv, SMTInterpol, MathSAT5, 2020-OpenSMT; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-linearrealarith-parallel.html b/archive/2021/results/qf-linearrealarith-parallel.html
index 56d385d2..b8c71c59 100644
--- a/archive/2021/results/qf-linearrealarith-parallel.html
+++ b/archive/2021/results/qf-linearrealarith-parallel.html

    [collapsed hunks: single-line formatting fixes across the QF_LinearRealArith (Parallel Track) results page; entrants Par4 and cvc5-gg; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-linearrealarith-single-query.html b/archive/2021/results/qf-linearrealarith-single-query.html
index 6147b1ba..333e3f39 100644
--- a/archive/2021/results/qf-linearrealarith-single-query.html
+++ b/archive/2021/results/qf-linearrealarith-single-query.html

    [collapsed hunks: single-line formatting fixes across the QF_LinearRealArith (Single Query Track) results page; winners as displayed: Yices2 (sequential, parallel, SAT, 24 s) and cvc5 (UNSAT); entrants Yices2, cvc5, OpenSMT, z3, veriT, SMTInterpol, MathSAT5, 2020-OpenSMT, 2019-Par4, 2019-Yices 2.6.2, mc2; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-linearrealarith-unsat-core.html b/archive/2021/results/qf-linearrealarith-unsat-core.html
index 9bd49e30..a400504a 100644
--- a/archive/2021/results/qf-linearrealarith-unsat-core.html
+++ b/archive/2021/results/qf-linearrealarith-unsat-core.html

    [collapsed hunks: single-line formatting fixes across the QF_LinearRealArith (Unsat Core Track) results page; sequential and parallel winner Yices2; entrants 2020-Yices2, Yices2, cvc5-uc, z3, MathSAT5, SMTInterpol, SMTInterpol-remus; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-lira-model-validation.html b/archive/2021/results/qf-lira-model-validation.html
index 8f082395..ed036b57 100644
--- a/archive/2021/results/qf-lira-model-validation.html
+++ b/archive/2021/results/qf-lira-model-validation.html

    [collapsed hunks: single-line formatting fixes across the QF_LIRA (Model Validation Track) results page; sequential and parallel winner Yices2 model-validation; entrants 2020-Yices2 Model Validation, 2020-Yices2-fixed Model Validation, Yices2 model-validation, 2020-z3, z3-mv, MathSAT5, cvc5-mv, SMTInterpol; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-lira-single-query.html b/archive/2021/results/qf-lira-single-query.html
index a32b662f..341120f4 100644
--- a/archive/2021/results/qf-lira-single-query.html
+++ b/archive/2021/results/qf-lira-single-query.html

    [collapsed hunks: single-line formatting fixes across the QF_LIRA (Single Query Track) results page; winner Yices2 in all five displayed categories; entrants 2019-Par4, z3, Yices2, cvc5, cvc5 - fixed, veriT, MathSAT5, SMTInterpol; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-lira-unsat-core.html b/archive/2021/results/qf-lira-unsat-core.html
index 518409a8..e709638d 100644
--- a/archive/2021/results/qf-lira-unsat-core.html
+++ b/archive/2021/results/qf-lira-unsat-core.html

    [collapsed hunks: single-line formatting fixes across the QF_LIRA (Unsat Core Track) results page; sequential and parallel winner SMTInterpol-remus; entrants SMTInterpol-remus, Yices2, 2020-Yices2-fixed, z3, cvc5-uc, MathSAT5, SMTInterpol; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-lra-cloud.html b/archive/2021/results/qf-lra-cloud.html
index c1e28c8f..4a7f96e5 100644
--- a/archive/2021/results/qf-lra-cloud.html
+++ b/archive/2021/results/qf-lra-cloud.html

    [collapsed hunks: single-line formatting fixes across the QF_LRA (Cloud Track) results page; entrants SMTS portfolio, SMTS cube-and-conquer, Par4, cvc5-gg; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-lra-incremental.html b/archive/2021/results/qf-lra-incremental.html
index c40cee19..d753a693 100644
--- a/archive/2021/results/qf-lra-incremental.html
+++ b/archive/2021/results/qf-lra-incremental.html

    [collapsed hunks: single-line formatting fixes across the QF_LRA (Incremental Track) results page; parallel-performance winner OpenSMT; entrants 2018-MathSAT (incremental), MathSAT5, OpenSMT, Yices2 incremental, cvc5-inc, SMTInterpol, z3; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-lra-model-validation.html b/archive/2021/results/qf-lra-model-validation.html
index 41dc6bfa..2c242c77 100644
--- a/archive/2021/results/qf-lra-model-validation.html
+++ b/archive/2021/results/qf-lra-model-validation.html

    [collapsed hunks: single-line formatting fixes across the QF_LRA (Model Validation Track) results page; sequential and parallel winner OpenSMT; entrants OpenSMT, 2020-OpenSMT, z3-mv, Yices2 model-validation, cvc5-mv, 2020-Yices2-fixed Model Validation, SMTInterpol, MathSAT5; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-lra-parallel.html b/archive/2021/results/qf-lra-parallel.html
index 15a7c09b..38f3acac 100644
--- a/archive/2021/results/qf-lra-parallel.html
+++ b/archive/2021/results/qf-lra-parallel.html

    [collapsed hunks: single-line formatting fixes across the QF_LRA (Parallel Track) results page; entrants Par4 and cvc5-gg; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-lra-single-query.html b/archive/2021/results/qf-lra-single-query.html
index 94d8c915..5631fea7 100644
--- a/archive/2021/results/qf-lra-single-query.html
+++ b/archive/2021/results/qf-lra-single-query.html

    [collapsed hunks: single-line formatting fixes across the QF_LRA (Single Query Track) results page; winners as displayed: OpenSMT (sequential, parallel, UNSAT) and Yices2 (SAT, 24 s); entrants OpenSMT, 2020-OpenSMT, cvc5, Yices2, 2019-Par4, z3, veriT, SMTInterpol, MathSAT5, mc2; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-lra-unsat-core.html b/archive/2021/results/qf-lra-unsat-core.html
index 38541383..2bfcc3fe 100644
--- a/archive/2021/results/qf-lra-unsat-core.html
+++ b/archive/2021/results/qf-lra-unsat-core.html

    [collapsed hunks: single-line formatting fixes across the QF_LRA (Unsat Core Track) results page; sequential and parallel winner Yices2; entrants 2020-Yices2, Yices2, cvc5-uc, z3, MathSAT5, SMTInterpol, SMTInterpol-remus; trailing line removed at end of file]

diff --git a/archive/2021/results/qf-nia-cloud.html b/archive/2021/results/qf-nia-cloud.html
index 228094f3..f8484562 100644
--- a/archive/2021/results/qf-nia-cloud.html
+++ b/archive/2021/results/qf-nia-cloud.html

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2021 Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    QF_NIA (Cloud Track)

    Competition results for the QF_NIA - + logic - + in the Cloud Track.

    @@ -117,7 +117,7 @@

    QF_NIA (Cloud Track)

    - + Par4n 0 613280.9976511010 @@ -126,7 +126,7 @@

    QF_NIA (Cloud Track)

    - + cvc5-gg 0 118050.0411101515 @@ -146,7 +146,7 @@

    QF_NIA (Cloud Track)

    - + Par4n 0 57273.7595505610 @@ -155,7 +155,7 @@

    QF_NIA (Cloud Track)

    - + cvc5-gg 0 110850.0411109615 @@ -175,7 +175,7 @@

    QF_NIA (Cloud Track)

    - + Par4n 0 17.23810101510 @@ -184,7 +184,7 @@

    QF_NIA (Cloud Track)

    - + cvc5-gg 0 01200.000011515 @@ -204,7 +204,7 @@

    QF_NIA (Cloud Track)

    - + Par4n 0 3324.8933211313 @@ -213,7 +213,7 @@

    QF_NIA (Cloud Track)

    - + cvc5-gg 0 0384.00001616 @@ -237,7 +237,6 @@

    QF_NIA (Cloud Track)

diff --git a/archive/2021/results/qf-nia-incremental.html b/archive/2021/results/qf-nia-incremental.html
index 278c90c4..6a0fdbc1 100644
--- a/archive/2021/results/qf-nia-incremental.html
+++ b/archive/2021/results/qf-nia-incremental.html
[garbled hunks: small markup edits across the QF_NIA (Incremental Track) results page, covering the header logo link, navigation, page description, winner summary (SMTInterpol), and result-table rows for MathSAT5n, 2020-MathSAT5n, z3n, SMTInterpol, cvc5-inc, and Yices2 incremental]
diff --git a/archive/2021/results/qf-nia-parallel.html b/archive/2021/results/qf-nia-parallel.html
index c3d63947..ad7222cc 100644
--- a/archive/2021/results/qf-nia-parallel.html
+++ b/archive/2021/results/qf-nia-parallel.html
[garbled hunks: small markup edits across the QF_NIA (Parallel Track) results page, covering the header logo link, navigation, page description, and result-table rows for Par4n and cvc5-gg]
diff --git a/archive/2021/results/qf-nia-single-query.html b/archive/2021/results/qf-nia-single-query.html
index 20bb6fce..166739b8 100644
--- a/archive/2021/results/qf-nia-single-query.html
+++ b/archive/2021/results/qf-nia-single-query.html
[garbled hunks: small markup edits across the QF_NIA (Single Query Track) results page, covering the header logo link, navigation, page description, winner summary (cvc5 for sequential, parallel, and SAT performance; Yices2 for UNSAT and 24s performance), and result-table rows for 2019-Par4n, z3n, MathSAT5n, cvc5, cvc5 - fixedn, Yices2, AProVE, SMT-RAT, and 2020-SMT-RATn]
diff --git a/archive/2021/results/qf-nia-unsat-core.html b/archive/2021/results/qf-nia-unsat-core.html
index ce32fbcd..67a54698 100644
--- a/archive/2021/results/qf-nia-unsat-core.html
+++ b/archive/2021/results/qf-nia-unsat-core.html
[garbled hunks: small markup edits across the QF_NIA (Unsat Core Track) results page, covering the header logo link, navigation, page description, winner summary (cvc5-uc), and result-table rows for z3n, MathSAT5n, cvc5-uc, and Yices2]
diff --git a/archive/2021/results/qf-nira-single-query.html b/archive/2021/results/qf-nira-single-query.html
index 580bbf8b..b3246a0e 100644
--- a/archive/2021/results/qf-nira-single-query.html
+++ b/archive/2021/results/qf-nira-single-query.html
[garbled hunks: small markup edits across the QF_NIRA (Single Query Track) results page, covering the header logo link, navigation, page description, winner summary (SMT-RAT for sequential, parallel, and UNSAT performance; no SAT or 24s winner), and result-table rows for 2020-SMT-RATn, SMT-RAT, z3n, cvc5 - fixedn, cvc5, MathSAT5n, and Yices2]
diff --git a/archive/2021/results/qf-nira-unsat-core.html b/archive/2021/results/qf-nira-unsat-core.html
index d6e1afb9..484c96ef 100644
--- a/archive/2021/results/qf-nira-unsat-core.html
+++ b/archive/2021/results/qf-nira-unsat-core.html
[garbled hunks: small markup edits across the QF_NIRA (Unsat Core Track) results page, covering the header logo link, navigation, page description, winner summary (cvc5-uc), and result-table rows for z3n, MathSAT5n, cvc5-uc, and Yices2]
diff --git a/archive/2021/results/qf-nonlinearintarith-cloud.html b/archive/2021/results/qf-nonlinearintarith-cloud.html
index cafb3c68..02e682d4 100644
--- a/archive/2021/results/qf-nonlinearintarith-cloud.html
+++ b/archive/2021/results/qf-nonlinearintarith-cloud.html
[garbled hunks: small markup edits across the QF_NonLinearIntArith (Cloud Track) division results page, covering the header logo link, navigation, page description, and result-table rows for Par4n and cvc5-gg]
diff --git a/archive/2021/results/qf-nonlinearintarith-incremental.html b/archive/2021/results/qf-nonlinearintarith-incremental.html
index 6eb319f3..a0a54dcf 100644
--- a/archive/2021/results/qf-nonlinearintarith-incremental.html
+++ b/archive/2021/results/qf-nonlinearintarith-incremental.html
[garbled hunks: small markup edits across the QF_NonLinearIntArith (Incremental Track) division results page, covering the header logo link, navigation, page description, winner summary (SMTInterpol), and result-table rows for MathSAT5n, 2020-MathSAT5n, z3n, SMTInterpol, cvc5-inc, and Yices2 incremental]
diff --git a/archive/2021/results/qf-nonlinearintarith-parallel.html b/archive/2021/results/qf-nonlinearintarith-parallel.html
index d575314b..72e9ed8e 100644
--- a/archive/2021/results/qf-nonlinearintarith-parallel.html
+++ b/archive/2021/results/qf-nonlinearintarith-parallel.html
[garbled hunks: small markup edits across the QF_NonLinearIntArith (Parallel Track) division results page, covering the header logo link, navigation, page description, and result-table rows for Par4n and cvc5-gg]
diff --git a/archive/2021/results/qf-nonlinearintarith-single-query.html b/archive/2021/results/qf-nonlinearintarith-single-query.html
index e413a235..e67c4faa 100644
--- a/archive/2021/results/qf-nonlinearintarith-single-query.html
+++ b/archive/2021/results/qf-nonlinearintarith-single-query.html
[garbled hunks: small markup edits across the QF_NonLinearIntArith (Single Query Track) division results page, covering the header logo link, navigation, page description, winner summary (cvc5 for sequential, parallel, and SAT performance; Yices2 for UNSAT and 24s performance), and result-table rows for 2019-Par4n, z3n, MathSAT5n, cvc5, cvc5 - fixedn, Yices2, AProVE, SMT-RAT, and 2020-SMT-RATn]
diff --git a/archive/2021/results/qf-nonlinearintarith-unsat-core.html b/archive/2021/results/qf-nonlinearintarith-unsat-core.html
index 3fa77a47..11691530 100644
--- a/archive/2021/results/qf-nonlinearintarith-unsat-core.html
+++ b/archive/2021/results/qf-nonlinearintarith-unsat-core.html
[garbled hunks: small markup edits across the QF_NonLinearIntArith (Unsat Core Track) division results page, covering the header logo link, navigation, page description, winner summary (cvc5-uc), and result-table rows for z3n, MathSAT5n, cvc5-uc, and Yices2]
diff --git a/archive/2021/results/qf-nonlinearrealarith-cloud.html b/archive/2021/results/qf-nonlinearrealarith-cloud.html
index 7e390db8..d1aa3c9c 100644
--- a/archive/2021/results/qf-nonlinearrealarith-cloud.html
+++ b/archive/2021/results/qf-nonlinearrealarith-cloud.html
[garbled hunks: small markup edits across the QF_NonLinearRealArith (Cloud Track) division results page, covering the header logo link, navigation, page description, and result-table rows for Par4n and cvc5-gg]
diff --git a/archive/2021/results/qf-nonlinearrealarith-parallel.html b/archive/2021/results/qf-nonlinearrealarith-parallel.html
index 29a86cc1..1746ed0b 100644
--- a/archive/2021/results/qf-nonlinearrealarith-parallel.html
+++ b/archive/2021/results/qf-nonlinearrealarith-parallel.html
[garbled hunks: small markup edits across the QF_NonLinearRealArith (Parallel Track) division results page, covering the header logo link, navigation, page description, and result-table rows for Par4n and cvc5-gg]
diff --git a/archive/2021/results/qf-nonlinearrealarith-single-query.html b/archive/2021/results/qf-nonlinearrealarith-single-query.html
index 8a44eeb1..cf231a38 100644
--- a/archive/2021/results/qf-nonlinearrealarith-single-query.html
+++ b/archive/2021/results/qf-nonlinearrealarith-single-query.html
[garbled hunks: small markup edits across the QF_NonLinearRealArith (Single Query Track) division results page, covering the header logo link, navigation, page description, winner summary (cvc5 in all five performance categories), and result-table rows for 2019-Par4n, cvc5, Yices2, z3n, SMT-RAT-MCSAT, veriT+raSAT+Redlog, and MathSAT5n]
diff --git a/archive/2021/results/qf-nonlinearrealarith-unsat-core.html b/archive/2021/results/qf-nonlinearrealarith-unsat-core.html
index 610f6ec7..6ded91d3 100644
--- a/archive/2021/results/qf-nonlinearrealarith-unsat-core.html
+++ b/archive/2021/results/qf-nonlinearrealarith-unsat-core.html
[garbled hunks: small markup edits across the QF_NonLinearRealArith (Unsat Core Track) division results page, covering the header logo link, navigation, page description, winner summary (cvc5-uc), and result-table rows for cvc5-uc, MathSAT5n, z3n, and Yices2]
diff --git a/archive/2021/results/qf-nra-cloud.html b/archive/2021/results/qf-nra-cloud.html
index 982f812a..084bd9a6 100644
--- a/archive/2021/results/qf-nra-cloud.html
+++ b/archive/2021/results/qf-nra-cloud.html
[garbled hunks: small markup edits across the QF_NRA (Cloud Track) results page, covering the header logo link, navigation, page description, and result-table rows for Par4n and cvc5-gg]
diff --git a/archive/2021/results/qf-nra-parallel.html b/archive/2021/results/qf-nra-parallel.html
index 546c1bd9..3c3770f6 100644
--- a/archive/2021/results/qf-nra-parallel.html
+++ b/archive/2021/results/qf-nra-parallel.html
[garbled hunks: small markup edits across the QF_NRA (Parallel Track) results page, covering the header logo link, navigation, page description, and result-table rows for Par4n and cvc5-gg]
diff --git a/archive/2021/results/qf-nra-single-query.html b/archive/2021/results/qf-nra-single-query.html
index 32ea8c45..ba1bad5a 100644
--- a/archive/2021/results/qf-nra-single-query.html
+++ b/archive/2021/results/qf-nra-single-query.html
[garbled hunks: small markup edits across the QF_NRA (Single Query Track) results page, covering the header logo link, navigation, page description, winner summary (cvc5 in all five performance categories), and result-table rows for 2019-Par4n, cvc5, Yices2, z3n, SMT-RAT-MCSAT, veriT+raSAT+Redlog, and MathSAT5n]
diff --git a/archive/2021/results/qf-nra-unsat-core.html b/archive/2021/results/qf-nra-unsat-core.html
index 8979a677..c8c2cefe 100644
--- a/archive/2021/results/qf-nra-unsat-core.html
+++ b/archive/2021/results/qf-nra-unsat-core.html
[garbled hunks: small markup edits across the QF_NRA (Unsat Core Track) results page, covering the header logo link, navigation, page description, winner summary (cvc5-uc), and result-table rows for cvc5-uc, MathSAT5n, z3n, and Yices2]
diff --git a/archive/2021/results/qf-rdl-cloud.html b/archive/2021/results/qf-rdl-cloud.html
index ec2a7ccc..853e977f 100644
--- a/archive/2021/results/qf-rdl-cloud.html
+++ b/archive/2021/results/qf-rdl-cloud.html
[garbled hunks: small markup edits across the QF_RDL (Cloud Track) results page, covering the header logo link, navigation, page description, and result-table rows for SMTS cube-and-conquer, cvc5-gg, and SMTS portfolio]
diff --git a/archive/2021/results/qf-rdl-model-validation.html b/archive/2021/results/qf-rdl-model-validation.html
index 5ec27d99..e72ebea9 100644
--- a/archive/2021/results/qf-rdl-model-validation.html
+++ b/archive/2021/results/qf-rdl-model-validation.html
[garbled hunks: small markup edits across the QF_RDL (Model Validation Track) results page, covering the header logo link, navigation, page description, winner summary (Yices2 model-validation), and result-table rows for Yices2 model-validation, 2020-Yices2-fixed Model Validationn, cvc5-mv, MathSAT5n, z3-mvn, SMTInterpol, and OpenSMT]
diff --git a/archive/2021/results/qf-rdl-single-query.html b/archive/2021/results/qf-rdl-single-query.html
index 084b9127..e87ae7ed 100644
--- a/archive/2021/results/qf-rdl-single-query.html
+++ b/archive/2021/results/qf-rdl-single-query.html
[garbled hunks: small markup edits across the QF_RDL (Single Query Track) results page, covering the header logo link, navigation, page description, winner summary (Yices2 in all five performance categories), and result-table rows for 2019-Yices 2.6.2n, Yices2, veriT, cvc5, z3n, MathSAT5n, OpenSMT, and SMTInterpol]
    - + - diff --git a/archive/2021/results/qf-s-single-query.html b/archive/2021/results/qf-s-single-query.html index 35300fe4..21d9459d 100644 --- a/archive/2021/results/qf-s-single-query.html +++ b/archive/2021/results/qf-s-single-query.html @@ -35,7 +35,7 @@

[QF_S (Single Query Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-slia-single-query.html b/archive/2021/results/qf-slia-single-query.html
index 23f3d315..29e90bda 100644
--- a/archive/2021/results/qf-slia-single-query.html
+++ b/archive/2021/results/qf-slia-single-query.html

[QF_SLIA (Single Query Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-snia-single-query.html b/archive/2021/results/qf-snia-single-query.html
index ed4fde7d..02fe7b19 100644
--- a/archive/2021/results/qf-snia-single-query.html
+++ b/archive/2021/results/qf-snia-single-query.html

[QF_SNIA (Single Query Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-strings-single-query.html b/archive/2021/results/qf-strings-single-query.html
index 9fb382d5..20341470 100644
--- a/archive/2021/results/qf-strings-single-query.html
+++ b/archive/2021/results/qf-strings-single-query.html

[QF_Strings (Single Query Track) results page, a division rather than a single logic: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-uf-incremental.html b/archive/2021/results/qf-uf-incremental.html
index 2d08cbeb..92483206 100644
--- a/archive/2021/results/qf-uf-incremental.html
+++ b/archive/2021/results/qf-uf-incremental.html

[QF_UF (Incremental Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-uf-model-validation.html b/archive/2021/results/qf-uf-model-validation.html
index e3996266..9cd1e4af 100644
--- a/archive/2021/results/qf-uf-model-validation.html
+++ b/archive/2021/results/qf-uf-model-validation.html

[QF_UF (Model Validation Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-uf-single-query.html b/archive/2021/results/qf-uf-single-query.html
index 06323883..7c64e213 100644
--- a/archive/2021/results/qf-uf-single-query.html
+++ b/archive/2021/results/qf-uf-single-query.html

[QF_UF (Single Query Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-uf-unsat-core.html b/archive/2021/results/qf-uf-unsat-core.html
index 5c3c53cd..558a6e3b 100644
--- a/archive/2021/results/qf-uf-unsat-core.html
+++ b/archive/2021/results/qf-uf-unsat-core.html

[QF_UF (Unsat Core Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-ufbv-incremental.html b/archive/2021/results/qf-ufbv-incremental.html
index 74e2a1b3..092daefe 100644
--- a/archive/2021/results/qf-ufbv-incremental.html
+++ b/archive/2021/results/qf-ufbv-incremental.html

[QF_UFBV (Incremental Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-ufbv-model-validation.html b/archive/2021/results/qf-ufbv-model-validation.html
index b3d3db79..85c897ec 100644
--- a/archive/2021/results/qf-ufbv-model-validation.html
+++ b/archive/2021/results/qf-ufbv-model-validation.html

[QF_UFBV (Model Validation Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-ufbv-single-query.html b/archive/2021/results/qf-ufbv-single-query.html
index 9064630e..48ddc4b9 100644
--- a/archive/2021/results/qf-ufbv-single-query.html
+++ b/archive/2021/results/qf-ufbv-single-query.html

[QF_UFBV (Single Query Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-ufbv-unsat-core.html b/archive/2021/results/qf-ufbv-unsat-core.html
index 9a5b0ddf..d380dd6f 100644
--- a/archive/2021/results/qf-ufbv-unsat-core.html
+++ b/archive/2021/results/qf-ufbv-unsat-core.html

[QF_UFBV (Unsat Core Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-ufdt-single-query.html b/archive/2021/results/qf-ufdt-single-query.html
index d62c3990..8753912c 100644
--- a/archive/2021/results/qf-ufdt-single-query.html
+++ b/archive/2021/results/qf-ufdt-single-query.html

[QF_UFDT (Single Query Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-ufdt-unsat-core.html b/archive/2021/results/qf-ufdt-unsat-core.html
index 3dda9881..936a70ee 100644
--- a/archive/2021/results/qf-ufdt-unsat-core.html
+++ b/archive/2021/results/qf-ufdt-unsat-core.html

[QF_UFDT (Unsat Core Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-ufdtlira-single-query.html b/archive/2021/results/qf-ufdtlira-single-query.html
index 3e8ef471..3d045966 100644
--- a/archive/2021/results/qf-ufdtlira-single-query.html
+++ b/archive/2021/results/qf-ufdtlira-single-query.html

[QF_UFDTLIRA (Single Query Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-ufdtlira-unsat-core.html b/archive/2021/results/qf-ufdtlira-unsat-core.html
index 9c5ea057..8119f64d 100644
--- a/archive/2021/results/qf-ufdtlira-unsat-core.html
+++ b/archive/2021/results/qf-ufdtlira-unsat-core.html

[QF_UFDTLIRA (Unsat Core Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-uffp-incremental.html b/archive/2021/results/qf-uffp-incremental.html
index 0244307e..3b35787d 100644
--- a/archive/2021/results/qf-uffp-incremental.html
+++ b/archive/2021/results/qf-uffp-incremental.html

[QF_UFFP (Incremental Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-uffp-unsat-core.html b/archive/2021/results/qf-uffp-unsat-core.html
index dcc436f7..c85ff1d5 100644
--- a/archive/2021/results/qf-uffp-unsat-core.html
+++ b/archive/2021/results/qf-uffp-unsat-core.html

[QF_UFFP (Unsat Core Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-uffpdtlira-single-query.html b/archive/2021/results/qf-uffpdtlira-single-query.html
index 5e414974..230951b4 100644
--- a/archive/2021/results/qf-uffpdtlira-single-query.html
+++ b/archive/2021/results/qf-uffpdtlira-single-query.html

[QF_UFFPDTLIRA (Single Query Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-uffpdtlira-unsat-core.html b/archive/2021/results/qf-uffpdtlira-unsat-core.html
index 88270bce..c3631305 100644
--- a/archive/2021/results/qf-uffpdtlira-unsat-core.html
+++ b/archive/2021/results/qf-uffpdtlira-unsat-core.html

[QF_UFFPDTLIRA (Unsat Core Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-ufidl-model-validation.html b/archive/2021/results/qf-ufidl-model-validation.html
index d74c64cc..9f53218f 100644
--- a/archive/2021/results/qf-ufidl-model-validation.html
+++ b/archive/2021/results/qf-ufidl-model-validation.html

[QF_UFIDL (Model Validation Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-ufidl-single-query.html b/archive/2021/results/qf-ufidl-single-query.html
index ecb3cc8f..868a72c1 100644
--- a/archive/2021/results/qf-ufidl-single-query.html
+++ b/archive/2021/results/qf-ufidl-single-query.html

[QF_UFIDL (Single Query Track) results page: same formatting-only markup hunks; result data unchanged.]

diff --git a/archive/2021/results/qf-ufidl-unsat-core.html b/archive/2021/results/qf-ufidl-unsat-core.html
index fcafa7ef..1d32ec4f 100644
--- a/archive/2021/results/qf-ufidl-unsat-core.html
+++ b/archive/2021/results/qf-ufidl-unsat-core.html

[QF_UFIDL (Unsat Core Track) results page, formatting-only hunks. Winner (sequential and parallel): Yices2. Ranking (error score 0 throughout): MathSAT5n 855767, 2020-Yices2-fixedn 794777, Yices2 794777, 2020-z3n 431304, SMTInterpol 369436, SMTInterpol-remus 352449, z3n 308810, 2020-CVC4-ucn 179150, cvc5-uc 24270; timing tables follow.]
diff --git a/archive/2021/results/qf-uflia-incremental.html b/archive/2021/results/qf-uflia-incremental.html
index 42bbc1ee..6f65b0a9 100644
--- a/archive/2021/results/qf-uflia-incremental.html
+++ b/archive/2021/results/qf-uflia-incremental.html
@@ -35,7 +35,7 @@

[QF_UFLIA (Incremental Track) results page, formatting-only hunks. Winner (parallel): Yices2 incremental. Solvers in table order: 2018-Z3 (incremental)n, z3n, 2020-z3n, MathSAT5n, Yices2 incremental, SMTInterpol, cvc5-inc.]
diff --git a/archive/2021/results/qf-uflia-model-validation.html b/archive/2021/results/qf-uflia-model-validation.html
index 0252409b..83af00aa 100644
--- a/archive/2021/results/qf-uflia-model-validation.html
+++ b/archive/2021/results/qf-uflia-model-validation.html
@@ -35,7 +35,7 @@

[QF_UFLIA (Model Validation Track) results page, formatting-only hunks. Winner (sequential and parallel): Yices2 model-validation. All five entrants scored 300: Yices2 model-validation, z3-mvn, cvc5-mv, SMTInterpol, MathSAT5n; the timing table separates them.]
diff --git a/archive/2021/results/qf-uflia-single-query.html b/archive/2021/results/qf-uflia-single-query.html
index 94639a1c..a4fe9c05 100644
--- a/archive/2021/results/qf-uflia-single-query.html
+++ b/archive/2021/results/qf-uflia-single-query.html
@@ -35,7 +35,7 @@

[QF_UFLIA (Single Query Track) results page, formatting-only hunks. Winner in all five categories (sequential, parallel, SAT, UNSAT, 24s): Yices2. Sequential ranking (error score 0 throughout): z3n 300, 2018-Yicesn 300, Yices2 300, cvc5 - fixedn 300, cvc5 300, SMTInterpol 300, MathSAT5n 299, veriT 257; parallel, SAT, UNSAT, and 24s tables follow.]
diff --git a/archive/2021/results/qf-uflia-unsat-core.html b/archive/2021/results/qf-uflia-unsat-core.html
index b3dc0a43..9169c69c 100644
--- a/archive/2021/results/qf-uflia-unsat-core.html
+++ b/archive/2021/results/qf-uflia-unsat-core.html
@@ -35,7 +35,7 @@

[QF_UFLIA (Unsat Core Track) results page, formatting-only hunks. Winner (sequential and parallel): Yices2. Ranking: 2020-Yices2-fixedn 21, Yices2 21, cvc5-uc 21, MathSAT5n 21, z3n 21, 2020-z3n 21, SMTInterpol 21, SMTInterpol-remus 21, 2020-CVC4-ucn 18; the timing table separates the tied entrants.]
diff --git a/archive/2021/results/qf-uflra-incremental.html b/archive/2021/results/qf-uflra-incremental.html
index 6d108648..9eb5f83a 100644
--- a/archive/2021/results/qf-uflra-incremental.html
+++ b/archive/2021/results/qf-uflra-incremental.html
@@ -35,7 +35,7 @@

[QF_UFLRA (Incremental Track) results page, formatting-only hunks. Winner (parallel): Yices2 incremental. Solvers in table order: 2020-z3n, Yices2 incremental, cvc5-inc, MathSAT5n, SMTInterpol, z3n.]
diff --git a/archive/2021/results/qf-uflra-model-validation.html b/archive/2021/results/qf-uflra-model-validation.html
index f1a525cb..f63c8b91 100644
--- a/archive/2021/results/qf-uflra-model-validation.html
+++ b/archive/2021/results/qf-uflra-model-validation.html
@@ -35,7 +35,7 @@

[QF_UFLRA (Model Validation Track) results page, formatting-only hunks. Winner (sequential and parallel): Yices2 model-validation. Ranking: Yices2 model-validation 383, cvc5-mv 383, SMTInterpol 383, z3-mvn 238, MathSAT5n 147; timing table follows.]
diff --git a/archive/2021/results/qf-uflra-single-query.html b/archive/2021/results/qf-uflra-single-query.html
index 54f1d680..af43037d 100644
--- a/archive/2021/results/qf-uflra-single-query.html
+++ b/archive/2021/results/qf-uflra-single-query.html
@@ -35,7 +35,7 @@

[QF_UFLRA (Single Query Track) results page, formatting-only hunks. Winner in all five categories (sequential, parallel, SAT, UNSAT, 24s): Yices2. Sequential ranking (error score 0 throughout): Yices2 539, 2019-SMTInterpoln 539, SMTInterpol 539, veriT 538, MathSAT5n 538, cvc5 - fixedn 538, cvc5 538, z3n 529, mc2 522; parallel, SAT, UNSAT, and 24s tables follow.]
diff --git a/archive/2021/results/qf-uflra-unsat-core.html b/archive/2021/results/qf-uflra-unsat-core.html
index df1846f6..7060e3d4 100644
--- a/archive/2021/results/qf-uflra-unsat-core.html
+++ b/archive/2021/results/qf-uflra-unsat-core.html
@@ -35,7 +35,7 @@

[QF_UFLRA (Unsat Core Track) results page, formatting-only hunks. Winner (sequential and parallel): SMTInterpol-remus. Ranking: SMTInterpol-remus 70, cvc5-uc 62, 2020-z3n 61, MathSAT5n 61, 2020-CVC4-ucn 61, Yices2 58, 2020-Yices2-fixedn 58, z3n 58, SMTInterpol 57; timing table follows.]
diff --git a/archive/2021/results/qf-ufnia-incremental.html b/archive/2021/results/qf-ufnia-incremental.html
index ea7b7f96..150e0cb5 100644
--- a/archive/2021/results/qf-ufnia-incremental.html
+++ b/archive/2021/results/qf-ufnia-incremental.html
@@ -35,7 +35,7 @@

[QF_UFNIA (Incremental Track) results page, formatting-only hunks. Winner (parallel): cvc5-inc. Solvers in table order: cvc5-inc, z3n, SMTInterpol, MathSAT5n, 2020-MathSAT5n, Yices2 incremental.]
diff --git a/archive/2021/results/qf-ufnia-single-query.html b/archive/2021/results/qf-ufnia-single-query.html
index 57db3198..07dccdc7 100644
--- a/archive/2021/results/qf-ufnia-single-query.html
+++ b/archive/2021/results/qf-ufnia-single-query.html
@@ -35,7 +35,7 @@

[QF_UFNIA (Single Query Track) results page, formatting-only hunks. Winners — sequential, parallel, SAT, and 24s: Yices2; UNSAT: cvc5. Sequential ranking (error score 0 throughout): z3n 296, 2020-CVC4n 287, 2019-CVC4n 284, Yices2 282, cvc5 270, cvc5 - fixedn 270, MathSAT5n 243; SAT, UNSAT, and 24s tables follow.]
diff --git a/archive/2021/results/qf-ufnia-unsat-core.html b/archive/2021/results/qf-ufnia-unsat-core.html
index 48cb5f85..f78be1ed 100644
--- a/archive/2021/results/qf-ufnia-unsat-core.html
+++ b/archive/2021/results/qf-ufnia-unsat-core.html
@@ -35,7 +35,7 @@

[QF_UFNIA (Unsat Core Track) results page, formatting-only hunks. Winner (sequential and parallel): cvc5-uc. Ranking: z3n 1054, cvc5-uc 957, MathSAT5n 846, Yices2 0; timing table follows.]
diff --git a/archive/2021/results/qf-ufnra-cloud.html b/archive/2021/results/qf-ufnra-cloud.html
index e526203a..053c7a60 100644
--- a/archive/2021/results/qf-ufnra-cloud.html
+++ b/archive/2021/results/qf-ufnra-cloud.html
@@ -35,7 +35,7 @@

[QF_UFNRA (Cloud Track) results page, formatting-only hunks. Two entrants, Par4n and cvc5-gg; Par4n leads the overall and SAT tables, and neither solves an UNSAT instance.]
diff --git a/archive/2021/results/qf-ufnra-incremental.html b/archive/2021/results/qf-ufnra-incremental.html
index 8939e940..22eb63a5 100644
--- a/archive/2021/results/qf-ufnra-incremental.html
+++ b/archive/2021/results/qf-ufnra-incremental.html
@@ -35,7 +35,7 @@

[QF_UFNRA (Incremental Track) results page, formatting-only hunks. Winner (parallel): cvc5-inc. Solvers in table order: z3n, cvc5-inc, SMTInterpol, MathSAT5n.]
diff --git a/archive/2021/results/qf-ufnra-parallel.html b/archive/2021/results/qf-ufnra-parallel.html
index f57e920d..c823f08f 100644
--- a/archive/2021/results/qf-ufnra-parallel.html
+++ b/archive/2021/results/qf-ufnra-parallel.html
@@ -35,7 +35,7 @@

[QF_UFNRA (Parallel Track) results page, formatting-only hunks. Two entrants, Par4n and cvc5-gg; Par4n leads the overall and SAT tables, and neither solves an UNSAT instance.]
diff --git a/archive/2021/results/qf-ufnra-single-query.html b/archive/2021/results/qf-ufnra-single-query.html
index 4d81284b..d0e2fc21 100644
--- a/archive/2021/results/qf-ufnra-single-query.html
+++ b/archive/2021/results/qf-ufnra-single-query.html
@@ -35,7 +35,7 @@

[QF_UFNRA (Single Query Track) results page, formatting-only hunks. Winners — sequential, parallel, SAT, and 24s: Yices2; UNSAT: cvc5. Sequential ranking (error score 0 throughout): z3n 23, 2019-Par4n 23, Yices2 23, 2020-CVC4n 13, MathSAT5n 8, cvc5 3, cvc5 - fixedn 3, veriT+raSAT+Redlog 1; SAT, UNSAT, and 24s tables follow.]
diff --git a/archive/2021/results/qf-ufnra-unsat-core.html b/archive/2021/results/qf-ufnra-unsat-core.html
index 3a84cf26..aa5ae65b 100644
--- a/archive/2021/results/qf-ufnra-unsat-core.html
+++ b/archive/2021/results/qf-ufnra-unsat-core.html
@@ -35,7 +35,7 @@

[QF_UFNRA (Unsat Core Track) results page, formatting-only hunks. Winner (sequential and parallel): cvc5-uc. Ranking: z3n 66, MathSAT5n 66, cvc5-uc 58, Yices2 0; timing table follows.]
diff --git a/archive/2021/results/results-cloud.html b/archive/2021/results/results-cloud.html
index 764126e8..0052a57e 100644
--- a/archive/2021/results/results-cloud.html
+++ b/archive/2021/results/results-cloud.html
@@ -35,7 +35,7 @@

[SMT-COMP 2021 Results - Cloud Track (Summary) page; formatting-only hunks in the page header and navigation.]
diff --git a/archive/2021/results/results-incremental.html b/archive/2021/results/results-incremental.html
index bc4a3c36..8a11af3c 100644
--- a/archive/2021/results/results-incremental.html
+++ b/archive/2021/results/results-incremental.html
@@ -35,7 +35,7 @@

[SMT-COMP 2021 Results - Incremental Track (Summary) page; formatting-only hunks in the page header and navigation.]
diff --git a/archive/2021/results/results-model-validation.html b/archive/2021/results/results-model-validation.html
index 7907a6a6..bafa127b 100644
--- a/archive/2021/results/results-model-validation.html
+++ b/archive/2021/results/results-model-validation.html
@@ -35,7 +35,7 @@

[SMT-COMP 2021 Results - Model Validation Track (Summary) page; formatting-only hunks in the page header and navigation.]
diff --git a/archive/2021/results/results-parallel.html b/archive/2021/results/results-parallel.html
index 427b2e9f..04ce0bdd 100644
--- a/archive/2021/results/results-parallel.html
+++ b/archive/2021/results/results-parallel.html
@@ -35,7 +35,7 @@

[SMT-COMP 2021 Results - Parallel Track (Summary) page; formatting-only hunks in the page header and navigation.]
diff --git a/archive/2021/results/results-single-query.html b/archive/2021/results/results-single-query.html
index 8882fbcb..72b8415a 100644
--- a/archive/2021/results/results-single-query.html
+++ b/archive/2021/results/results-single-query.html
@@ -35,7 +35,7 @@

[SMT-COMP 2021 Results - Single Query Track (Summary) page; formatting-only hunks in the page header and navigation.]
diff --git a/archive/2021/results/results-unsat-core.html b/archive/2021/results/results-unsat-core.html
index 3303115c..a9fe3500 100644
--- a/archive/2021/results/results-unsat-core.html
+++ b/archive/2021/results/results-unsat-core.html
@@ -35,7 +35,7 @@

[SMT-COMP 2021 Results - Unsat Core Track (Summary) page; formatting-only hunks in the page header and navigation.]
diff --git a/archive/2021/results/uf-cloud.html b/archive/2021/results/uf-cloud.html
index 94c40cf5..b7baa5ea 100644
--- a/archive/2021/results/uf-cloud.html
+++ b/archive/2021/results/uf-cloud.html
@@ -35,7 +35,7 @@

[UF (Cloud Track) results page, formatting-only hunks. Three entrants in table order Vampire, Par4n, cvc5-gg, with Vampire ahead in the overall, SAT, UNSAT, and 24s tables.]
diff --git a/archive/2021/results/uf-incremental.html b/archive/2021/results/uf-incremental.html
index 0c5d4e94..75dee16a 100644
--- a/archive/2021/results/uf-incremental.html
+++ b/archive/2021/results/uf-incremental.html
@@ -35,7 +35,7 @@

[UF (Incremental Track) results page, formatting-only hunks. Winner (parallel): cvc5-inc. Solvers in table order: 2020-z3n, z3n, cvc5-inc, SMTInterpol, UltimateEliminator+MathSAT.]
diff --git a/archive/2021/results/uf-parallel.html b/archive/2021/results/uf-parallel.html
index cf867b60..8e506a01 100644
--- a/archive/2021/results/uf-parallel.html
+++ b/archive/2021/results/uf-parallel.html
@@ -35,7 +35,7 @@

[UF (Parallel Track) results page, formatting-only hunks. Three entrants in table order Vampire, Par4n, cvc5-gg, with Vampire ahead in the overall, SAT, UNSAT, and 24s tables.]
diff --git a/archive/2021/results/uf-single-query.html b/archive/2021/results/uf-single-query.html
index d8bb6fe7..4ffb486e 100644
--- a/archive/2021/results/uf-single-query.html
+++ b/archive/2021/results/uf-single-query.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2021 Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -82,9 +82,9 @@

    UF (Single Query Track)

    Competition results for the UF - + logic - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    UF (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) VampireVampireVampire - - + + cvc5 - - + + Vampire - + @@ -131,7 +131,7 @@

    UF (Single Query Track)

    - + Vampire 0 1153 @@ -142,7 +142,7 @@

    UF (Single Query Track)

    - + 2020-CVC4n 0 1144 @@ -153,7 +153,7 @@

    UF (Single Query Track)

    - + 2020-Vampiren 0 1142 @@ -164,7 +164,7 @@

    UF (Single Query Track)

    - + Vampire - fixedn 0 1130 @@ -175,7 +175,7 @@

    UF (Single Query Track)

    - + cvc5 0 933 @@ -186,7 +186,7 @@

    UF (Single Query Track)

    - + iProver - fixed2n 0 744 @@ -197,7 +197,7 @@

    UF (Single Query Track)

    - + iProver - fixedn 0 733 @@ -208,7 +208,7 @@

    UF (Single Query Track)

    - + veriT 0 693 @@ -219,7 +219,7 @@

    UF (Single Query Track)

    - + z3n 0 462 @@ -230,7 +230,7 @@

    UF (Single Query Track)

    - + Yices2 0 349 @@ -241,7 +241,7 @@

    UF (Single Query Track)

    - + SMTInterpol 0 221 @@ -252,7 +252,7 @@

    UF (Single Query Track)

    - + UltimateEliminator+MathSAT 0 0 @@ -263,7 +263,7 @@

    UF (Single Query Track)

    - + iProver 12 827 @@ -285,7 +285,7 @@

    UF (Single Query Track)

    - + Vampire 0 11942176451.0712041645.326119445474016631662 @@ -294,7 +294,7 @@

    UF (Single Query Track)

    - + 2020-Vampiren 0 11782149905.7982031233.621117845272616791658 @@ -303,7 +303,7 @@

    UF (Single Query Track)

    - + Vampire - fixedn 0 11722180112.4062043509.724117244572716851664 @@ -312,7 +312,7 @@

    UF (Single Query Track)

    - + 2020-CVC4n 0 11442255285.2312257918.87114437576917131713 @@ -321,7 +321,7 @@

    UF (Single Query Track)

    - + cvc5 0 9332399219.5372400555.87293316476919241924 @@ -330,7 +330,7 @@

    UF (Single Query Track)

    - + iProver - fixed2n 0 7722622410.4662533299.99277221955320851946 @@ -339,7 +339,7 @@

    UF (Single Query Track)

    - + iProver - fixedn 0 7612640175.2432545135.96276121654520961946 @@ -348,7 +348,7 @@

    [Remaining hunks of the preceding UF (Single Query Track) results page diff: per-solver result rows for veriT, z3n, Yices2, SMTInterpol, UltimateEliminator+MathSAT, iProver, Vampire, 2020-Vampiren, Vampire - fixedn, 2020-CVC4n, iProver - fixed2n, iProver - fixedn, and cvc5; the numeric score cells are run together in this text rendering and are not recoverable.]
diff --git a/archive/2021/results/uf-unsat-core.html b/archive/2021/results/uf-unsat-core.html
index aea76222..2fe583db 100644
--- a/archive/2021/results/uf-unsat-core.html
+++ b/archive/2021/results/uf-unsat-core.html
    [UF (Unsat Core Track) results page: markup hunks over the per-solver tables (cvc5-uc, 2020-CVC4-ucn, z3n, SMTInterpol, SMTInterpol-remus, UltimateEliminator+MathSAT, Vampire); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufbv-cloud.html b/archive/2021/results/ufbv-cloud.html
index 807e643d..86a037b6 100644
--- a/archive/2021/results/ufbv-cloud.html
+++ b/archive/2021/results/ufbv-cloud.html
    [UFBV (Cloud Track) results page: markup hunks over the per-solver tables (cvc5-gg, Par4n); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufbv-parallel.html b/archive/2021/results/ufbv-parallel.html
index 9e73e199..367d0b9e 100644
--- a/archive/2021/results/ufbv-parallel.html
+++ b/archive/2021/results/ufbv-parallel.html
    [UFBV (Parallel Track) results page: markup hunks over the per-solver tables (cvc5-gg, Par4n); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufbv-single-query.html b/archive/2021/results/ufbv-single-query.html
index 8619e318..76937be5 100644
--- a/archive/2021/results/ufbv-single-query.html
+++ b/archive/2021/results/ufbv-single-query.html
    [UFBV (Single Query Track) results page: markup hunks over the per-solver tables (2018-Z3n, 2020-z3n, z3n, cvc5, cvc5 - fixedn, 2020-CVC4n, UltimateEliminator+MathSAT); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufbvfp-single-query.html b/archive/2021/results/ufbvfp-single-query.html
index dc742449..e341d2d0 100644
--- a/archive/2021/results/ufbvfp-single-query.html
+++ b/archive/2021/results/ufbvfp-single-query.html
    [UFBVFP (Single Query Track) results page: markup hunks over the per-solver tables (cvc5, cvc5 - fixedn, UltimateEliminator+MathSAT); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufbvlia-single-query.html b/archive/2021/results/ufbvlia-single-query.html
index 398d2712..d7eb264b 100644
--- a/archive/2021/results/ufbvlia-single-query.html
+++ b/archive/2021/results/ufbvlia-single-query.html
    [UFBVLIA (Single Query Track) results page: markup hunks over the per-solver tables (cvc5, cvc5 - fixedn, UltimateEliminator+MathSAT); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdt-cloud.html b/archive/2021/results/ufdt-cloud.html
index ebcf34f8..f47b0a77 100644
--- a/archive/2021/results/ufdt-cloud.html
+++ b/archive/2021/results/ufdt-cloud.html
    [UFDT (Cloud Track) results page: markup hunks over the per-solver tables (Vampire, cvc5-gg); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdt-parallel.html b/archive/2021/results/ufdt-parallel.html
index 7996a6b7..32474974 100644
--- a/archive/2021/results/ufdt-parallel.html
+++ b/archive/2021/results/ufdt-parallel.html
    [UFDT (Parallel Track) results page: markup hunks over the per-solver tables (Vampire, cvc5-gg); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdt-single-query.html b/archive/2021/results/ufdt-single-query.html
index 03abaef1..01fd3c02 100644
--- a/archive/2021/results/ufdt-single-query.html
+++ b/archive/2021/results/ufdt-single-query.html
    [UFDT (Single Query Track) results page: markup hunks over the per-solver tables (2020-CVC4n, cvc5, 2020-Vampiren, Vampire, Vampire - fixedn, iProver, iProver - fixedn, iProver - fixed2n); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdt-unsat-core.html b/archive/2021/results/ufdt-unsat-core.html
index 4c216354..5d5954e9 100644
--- a/archive/2021/results/ufdt-unsat-core.html
+++ b/archive/2021/results/ufdt-unsat-core.html
    [UFDT (Unsat Core Track) results page: markup hunks over the per-solver tables (2020-CVC4-ucn, cvc5-uc, Vampire); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdtlia-cloud.html b/archive/2021/results/ufdtlia-cloud.html
index e3f7d7e9..26276515 100644
--- a/archive/2021/results/ufdtlia-cloud.html
+++ b/archive/2021/results/ufdtlia-cloud.html
    [UFDTLIA (Cloud Track) results page: markup hunks over the per-solver tables (Vampire, cvc5-gg); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdtlia-parallel.html b/archive/2021/results/ufdtlia-parallel.html
index ae0d9df4..1f433547 100644
--- a/archive/2021/results/ufdtlia-parallel.html
+++ b/archive/2021/results/ufdtlia-parallel.html
    [UFDTLIA (Parallel Track) results page: markup hunks over the per-solver tables (Vampire, cvc5-gg); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdtlia-single-query.html b/archive/2021/results/ufdtlia-single-query.html
index f180ba6f..da84252b 100644
--- a/archive/2021/results/ufdtlia-single-query.html
+++ b/archive/2021/results/ufdtlia-single-query.html
    [UFDTLIA (Single Query Track) results page: markup hunks over the per-solver tables (2019-CVC4n, 2020-CVC4n, cvc5, cvc5 - fixedn, Vampire, Vampire - fixedn, 2020-Vampiren, iProver); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdtlira-cloud.html b/archive/2021/results/ufdtlira-cloud.html
index 7865282a..bf9f0a8e 100644
--- a/archive/2021/results/ufdtlira-cloud.html
+++ b/archive/2021/results/ufdtlira-cloud.html
    [UFDTLIRA (Cloud Track) results page: markup hunks over the per-solver tables (cvc5-gg, Vampire); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdtlira-parallel.html b/archive/2021/results/ufdtlira-parallel.html
index 62980413..3f383224 100644
--- a/archive/2021/results/ufdtlira-parallel.html
+++ b/archive/2021/results/ufdtlira-parallel.html
    [UFDTLIRA (Parallel Track) results page: markup hunks over the per-solver tables (cvc5-gg, Vampire); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdtlira-single-query.html b/archive/2021/results/ufdtlira-single-query.html
index 1521b932..aae884d0 100644
--- a/archive/2021/results/ufdtlira-single-query.html
+++ b/archive/2021/results/ufdtlira-single-query.html
    [UFDTLIRA (Single Query Track) results page: markup hunks over the per-solver tables (2020-CVC4n, cvc5, cvc5 - fixedn, Vampire, Vampire - fixedn, iProver, 2020-Vampiren); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdtlira-unsat-core.html b/archive/2021/results/ufdtlira-unsat-core.html
index cab5292d..b9505e07 100644
--- a/archive/2021/results/ufdtlira-unsat-core.html
+++ b/archive/2021/results/ufdtlira-unsat-core.html
    [UFDTLIRA (Unsat Core Track) results page: markup hunks over the per-solver tables (2020-CVC4-ucn, cvc5-uc, Vampire); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdtnia-incremental.html b/archive/2021/results/ufdtnia-incremental.html
index 9a2e9dcd..94d28fff 100644
--- a/archive/2021/results/ufdtnia-incremental.html
+++ b/archive/2021/results/ufdtnia-incremental.html
    [UFDTNIA (Incremental Track) results page: markup hunks over the per-solver table (cvc5-inc); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdtnia-single-query.html b/archive/2021/results/ufdtnia-single-query.html
index d1772e2b..8ac1046a 100644
--- a/archive/2021/results/ufdtnia-single-query.html
+++ b/archive/2021/results/ufdtnia-single-query.html
    [UFDTNIA (Single Query Track) results page: markup hunks over the per-solver tables (2020-Vampiren, Vampire, Vampire - fixedn, cvc5, cvc5 - fixedn, 2020-CVC4n, iProver); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdtnira-cloud.html b/archive/2021/results/ufdtnira-cloud.html
index 5122a01a..92bcd6e3 100644
--- a/archive/2021/results/ufdtnira-cloud.html
+++ b/archive/2021/results/ufdtnira-cloud.html
    [UFDTNIRA (Cloud Track) results page: markup hunks over the per-solver tables (cvc5-gg, Vampire); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdtnira-parallel.html b/archive/2021/results/ufdtnira-parallel.html
index cdcfe9c9..e499d7dc 100644
--- a/archive/2021/results/ufdtnira-parallel.html
+++ b/archive/2021/results/ufdtnira-parallel.html
    [UFDTNIRA (Parallel Track) results page: markup hunks over the per-solver tables (cvc5-gg, Vampire); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdtnira-single-query.html b/archive/2021/results/ufdtnira-single-query.html
index a49cf8da..511ecc49 100644
--- a/archive/2021/results/ufdtnira-single-query.html
+++ b/archive/2021/results/ufdtnira-single-query.html
    [UFDTNIRA (Single Query Track) results page: markup hunks over the per-solver tables (2020-CVC4n, cvc5, cvc5 - fixedn, 2020-Vampiren, Vampire, Vampire - fixedn, iProver); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/ufdtnira-unsat-core.html b/archive/2021/results/ufdtnira-unsat-core.html
index e92f9d39..b2fd6b48 100644
--- a/archive/2021/results/ufdtnira-unsat-core.html
+++ b/archive/2021/results/ufdtnira-unsat-core.html
    [UFDTNIRA (Unsat Core Track) results page: markup hunks over the per-solver tables (2020-CVC4-ucn, cvc5-uc, Vampire); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/uffpdtlira-single-query.html b/archive/2021/results/uffpdtlira-single-query.html
index 86214062..fbfb2db9 100644
--- a/archive/2021/results/uffpdtlira-single-query.html
+++ b/archive/2021/results/uffpdtlira-single-query.html
    [UFFPDTLIRA (Single Query Track) results page: markup hunks over the per-solver tables (cvc5, cvc5 - fixedn, 2020-CVC4n); numeric cells garbled in this text rendering.]
diff --git a/archive/2021/results/uffpdtlira-unsat-core.html b/archive/2021/results/uffpdtlira-unsat-core.html
index ebf4398f..1ddaf7a3 100644
[hunks: single-line markup edits to the UFFPDTLIRA (Unsat Core Track) results page, "Competition results for the UFFPDTLIRA logic in the Unsat Core Track." Standings (error score 0 throughout): cvc5-uc 5497, 2020-CVC4-ucn 5483.]
diff --git a/archive/2021/results/uffpdtnira-single-query.html b/archive/2021/results/uffpdtnira-single-query.html
index 03ffa5df..4e86c7ae 100644
[hunks: single-line markup edits to the UFFPDTNIRA (Single Query Track) results page, "Competition results for the UFFPDTNIRA logic in the Single Query Track," with sequential, parallel, SAT, UNSAT, and 24-second performance tables. Standings (error score 0 throughout): cvc5 11, cvc5 - fixedn 10, 2020-CVC4n 7.]
diff --git a/archive/2021/results/uffpdtnira-unsat-core.html b/archive/2021/results/uffpdtnira-unsat-core.html
index 855523cc..544763fe 100644
[hunks: single-line markup edits to the UFFPDTNIRA (Unsat Core Track) results page, "Competition results for the UFFPDTNIRA logic in the Unsat Core Track." Standings (error score 0 throughout): cvc5-uc 6, 2020-CVC4-ucn 6.]
diff --git a/archive/2021/results/ufidl-cloud.html b/archive/2021/results/ufidl-cloud.html
index 1e875e38..9c18265e 100644
[hunks: single-line markup edits to the UFIDL (Cloud Track) results page, "Competition results for the UFIDL logic in the Cloud Track." Correct answers (error score 0 throughout): cvc5-gg 4, Par4n 1, Vampire 0.]
diff --git a/archive/2021/results/ufidl-parallel.html b/archive/2021/results/ufidl-parallel.html
index 1de25028..5ae24c95 100644
[hunks: single-line markup edits to the UFIDL (Parallel Track) results page, "Competition results for the UFIDL logic in the Parallel Track." Correct answers (error score 0 throughout): cvc5-gg 4, Par4n 1, Vampire 0.]
diff --git a/archive/2021/results/ufidl-single-query.html b/archive/2021/results/ufidl-single-query.html
index b1f4e14b..ea2467bb 100644
[hunks: single-line markup edits to the UFIDL (Single Query Track) results page, "Competition results for the UFIDL logic in the Single Query Track," with sequential, parallel, SAT, UNSAT, and 24-second performance tables. Standings (error score 0 throughout): 2019-Par4n 12, z3n 11, 2020-z3n 11, cvc5 10, cvc5 - fixedn 10, 2020-CVC4n 10, SMTInterpol 8, veriT 7, iProver 7, Vampire 7, 2020-Vampiren 7, Vampire - fixedn 6, UltimateEliminator+MathSAT 0.]
diff --git a/archive/2021/results/ufidl-unsat-core.html b/archive/2021/results/ufidl-unsat-core.html
index d6c56eeb..90c9053e 100644
[hunks: single-line markup edits to the UFIDL (Unsat Core Track) results page, "Competition results for the UFIDL logic in the Unsat Core Track." Standings (error score 0 throughout): 2020-z3n 1916, cvc5-uc 1916, z3n 1915, 2020-CVC4-ucn 1915, Vampire 999, SMTInterpol-remus 983, SMTInterpol 982, UltimateEliminator+MathSAT 0.]
diff --git a/archive/2021/results/uflia-cloud.html b/archive/2021/results/uflia-cloud.html
index 9304d17a..57adbca3 100644
[hunks: single-line markup edits to the UFLIA (Cloud Track) results page, "Competition results for the UFLIA logic in the Cloud Track." Correct answers (error score 0 throughout): Vampire 9, Par4n 3, cvc5-gg 0.]
diff --git a/archive/2021/results/uflia-parallel.html b/archive/2021/results/uflia-parallel.html
index e8286245..03d1857b 100644
[hunks: single-line markup edits to the UFLIA (Parallel Track) results page, "Competition results for the UFLIA logic in the Parallel Track." Correct answers (error score 0 throughout): Vampire 5, Par4n 3, cvc5-gg 0.]
diff --git a/archive/2021/results/uflia-single-query.html b/archive/2021/results/uflia-single-query.html
index 60b71fc4..7641d605 100644
[hunks: single-line markup edits to the UFLIA (Single Query Track) results page, "Competition results for the UFLIA logic in the Single Query Track," with sequential, parallel, SAT, UNSAT, and 24-second performance tables. Standings (error score 0 throughout): 2019-Par4n 1641, 2020-CVC4n 1615, cvc5 - fixedn 1589, cvc5 1584, veriT 1558, Vampire 1451, Vampire - fixedn 1444, 2020-Vampiren 1420, 2020-z3n 1324, z3n 1255, iProver 664, SMTInterpol 410, UltimateEliminator+MathSAT 0.]
diff --git a/archive/2021/results/uflia-unsat-core.html b/archive/2021/results/uflia-unsat-core.html
index facfa4ad..0ec95e8e 100644
[hunks: single-line markup edits to the UFLIA (Unsat Core Track) results page, "Competition results for the UFLIA logic in the Unsat Core Track." Standings (error score, score): cvc5-uc 0, 923558; 2020-CVC4-ucn 0, 922036; 2020-z3n 0, 870462; z3n 0, 868471; Vampire 0, 828021; SMTInterpol 0, 597007; SMTInterpol-remus 0, 496919; UltimateEliminator+MathSAT 2, 2443.]
diff --git a/archive/2021/results/uflra-cloud.html b/archive/2021/results/uflra-cloud.html
index d4aed970..bc4ff0da 100644
[hunks: single-line markup edits to the UFLRA (Cloud Track) results page, "Competition results for the UFLRA logic in the Cloud Track." Standings (error score, correct answers): Vampire 0, 0; cvc5-gg 2, 0.]
diff --git a/archive/2021/results/uflra-incremental.html b/archive/2021/results/uflra-incremental.html
index e89ef464..4c9aabb9 100644
[hunks: single-line markup edits to the UFLRA (Incremental Track) results page, "Competition results for the UFLRA logic in the Incremental Track," parallel performance table. Correct answers (error score 0 throughout): 2019-Z3n 358440, z3n 356876, 2020-z3n 355793, cvc5-inc 136983, SMTInterpol 129185, UltimateEliminator+MathSAT 0.]
diff --git a/archive/2021/results/uflra-parallel.html b/archive/2021/results/uflra-parallel.html
index ad14a878..a6b15b3e 100644
[hunks: single-line markup edits to the UFLRA (Parallel Track) results page, "Competition results for the UFLRA logic in the Parallel Track." Standings (error score, correct answers): Vampire 0, 0; cvc5-gg 2, 0.]
diff --git a/archive/2021/results/uflra-single-query.html b/archive/2021/results/uflra-single-query.html
index f145b85e..6cf9e1c8 100644
[hunks: single-line markup edits to the UFLRA (Single Query Track) results page, "Competition results for the UFLRA logic in the Single Query Track," with sequential, parallel, SAT, UNSAT, and 24-second performance tables. Standings (error score 0 throughout): z3n 6, 2020-z3n 6, 2018-Z3n 5, cvc5 - fixedn 2, cvc5 2, veriT 2, SMTInterpol 2, iProver 2, 2020-CVC4n 2, 2020-Vampiren 2, UltimateEliminator+MathSAT 0, Vampire 0, Vampire - fixedn 0.]
diff --git a/archive/2021/results/uflra-unsat-core.html b/archive/2021/results/uflra-unsat-core.html
index f5efa792..e142dea6 100644
[hunks: single-line markup edits to the UFLRA (Unsat Core Track) results page, "Competition results for the UFLRA logic in the Unsat Core Track." Standings (error score 0 throughout): z3n 16, cvc5-uc 16, 2020-z3n 16, SMTInterpol 16, SMTInterpol-remus 16, 2020-CVC4-ucn 14, UltimateEliminator+MathSAT 0, Vampire 0.]
diff --git a/archive/2021/results/ufnia-cloud.html b/archive/2021/results/ufnia-cloud.html
index 85cd0af2..b36f70df 100644
[hunks: single-line markup edits to the UFNIA (Cloud Track) results page, "Competition results for the UFNIA logic in the Cloud Track." Correct answers (error score 0 throughout): cvc5-gg 8, Vampire 0, Par4n 0.]
diff --git a/archive/2021/results/ufnia-incremental.html b/archive/2021/results/ufnia-incremental.html
index 3f777ece..6b8034e3 100644
[hunks: single-line markup edits to the UFNIA (Incremental Track) results page, "Competition results for the UFNIA logic in the Incremental Track," parallel performance table. Correct answers (error score 0 throughout): 2020-z3n 88723, z3n 88660, cvc5-inc 33181, 2020-CVC4-incn 29859, SMTInterpol 15485, UltimateEliminator+MathSAT 0.]
diff --git a/archive/2021/results/ufnia-parallel.html b/archive/2021/results/ufnia-parallel.html
index e28f7d21..12aa9a82 100644
[hunks: single-line markup edits to the UFNIA (Parallel Track) results page, "Competition results for the UFNIA logic in the Parallel Track." Correct answers (error score 0 throughout): cvc5-gg 10, Par4n 0, Vampire 0.]
diff --git a/archive/2021/results/ufnia-single-query.html b/archive/2021/results/ufnia-single-query.html
index a5933681..067598c3 100644
[hunks: single-line markup edits to the UFNIA (Single Query Track) results page, "Competition results for the UFNIA logic in the Single Query Track," with sequential, parallel, SAT, UNSAT, and 24-second performance tables. Standings (error score, solved): 2020-CVC4n 0, 3520; cvc5 - fixedn 0, 3512; z3n 0, 2762; Vampire 0, 2528; Vampire - fixedn 0, 2500; 2020-Vampiren 0, 2143; iProver 0, 1264; UltimateEliminator+MathSAT 0, 520; SMTInterpol 0, 176; cvc5 5, 3520; 2019-Par4n 7, 3763.]
diff --git a/archive/2021/results/ufnia-unsat-core.html b/archive/2021/results/ufnia-unsat-core.html
index 0f3a6206..b3e530b0 100644
[hunks: single-line markup edits to the UFNIA (Unsat Core Track) results page, "Competition results for the UFNIA logic in the Unsat Core Track." Standings (error score 0 throughout): cvc5-uc 90396, 2020-CVC4-ucn 90245, Vampire 82486, z3n 66551, SMTInterpol 326, UltimateEliminator+MathSAT 90.]
    diff --git a/archive/2021/results/ufnra-incremental.html b/archive/2021/results/ufnra-incremental.html index 4bd5c609..d2ef3c90 100644

    UFNRA (Incremental Track)

    Competition results for the UFNRA logic in the Incremental Track.

    Parallel Performance: cvc5-inc.

    [result rows for z3n, 2020-z3n, cvc5-inc, 2020-CVC4-incn, SMTInterpol, and UltimateEliminator+MathSAT; the numeric columns ran together in extraction and are not recoverable]
    diff --git a/archive/2021/specs.html b/archive/2021/specs.html index e36e184f..8a998d09 100644

    [Machine Specifications page; the hunks touch only stripped HTML markup near the page header and footer]
    diff --git a/archive/2021/system-descriptions/mc2.html b/archive/2021/system-descriptions/mc2.html index 702b3b06..41eb7480 100644

    [mc2 system-description page; the hunks touch only stripped HTML markup near the page header and footer]
    diff --git a/archive/2021/tools.html b/archive/2021/tools.html index 18485718..c28d2a09 100644

    Tools

    Pre-Processor (Benchmark Scrambler)
    GitHub Repository | Sources | SMT-COMP 2021 Releases
    • Single Query Track: Binary, available on StarExec as SMT-COMP 2021 Single-Query Scrambler (id: 708)
    • Incremental Track: Binary, available on StarExec as SMT-COMP 2021 Incremental Scrambler (id: 709)
    • Unsat Core Track: Binary, available on StarExec as SMT-COMP 2021 Unsat-Core Scrambler (id: 711)
    • Model Validation Track: Binary, available on StarExec as SMT-COMP 2021 Model-Validation Scrambler (id: 710)

    Post-Processor
    GitHub Repository | Sources | SMT-COMP 2021 Releases
    • Single Query Track: Binary, available on StarExec as SMT-COMP 2021 Single Query (id: 692)
    • Incremental Track: Binary, available on StarExec as SMT-COMP 2021 Incremental (id: 691)
    • Unsat Core Track: Binary, available on StarExec as SMT-COMP 2021 Unsat Core (id: 727)
    • Model Validation Track: Binary, available on StarExec as SMT-COMP 2021 Model-Validation (id: 672)

    Trace executor
    GitHub Repository | Sources | Binary
    All solvers wrapped with the Trace executor are available here.

    Competition scripts
    GitHub Repository
    diff --git a/archive/2022/benchmarks.html b/archive/2022/benchmarks.html index 9d1f3abc..a1ebbfaf 100644

    [2022 Benchmarks page; the hunks touch only stripped HTML markup near the page header and footer]
    diff --git a/archive/2022/comparisons.html b/archive/2022/comparisons.html index 255fd9b8..a24f3577 100644

    [SMT-COMP 2022 Comparisons page; the hunks touch only stripped HTML markup near the page header and footer]
    [each 2022 division page below receives the same fix: two one-line markup edits near the page header and a one-line deletion at the end of the file; the tag text itself was lost in extraction]

    diff --git a/archive/2022/divisions/arith.html b/archive/2022/divisions/arith.html index 456b1b20..1946e0e7 100644
    diff --git a/archive/2022/divisions/bitvec.html b/archive/2022/divisions/bitvec.html index b5a3220a..7c1573d3 100644
    diff --git a/archive/2022/divisions/equality-lineararith.html b/archive/2022/divisions/equality-lineararith.html index 2adfcd81..1329d294 100644
    diff --git a/archive/2022/divisions/equality-machinearith.html b/archive/2022/divisions/equality-machinearith.html index 8d248734..0afb1222 100644
    diff --git a/archive/2022/divisions/equality-nonlineararith.html b/archive/2022/divisions/equality-nonlineararith.html index 6ba7beeb..e4814ed8 100644
    diff --git a/archive/2022/divisions/equality.html b/archive/2022/divisions/equality.html index 21c0ba8b..fb229c0a 100644
    diff --git a/archive/2022/divisions/fparith.html b/archive/2022/divisions/fparith.html index 1640dea2..c3d8ef5e 100644
    diff --git a/archive/2022/divisions/qf-bitvec.html b/archive/2022/divisions/qf-bitvec.html index cf0e4ad8..5a3f65e7 100644
    diff --git a/archive/2022/divisions/qf-datatypes.html b/archive/2022/divisions/qf-datatypes.html index 39f2ddcd..48565931 100644
    diff --git a/archive/2022/divisions/qf-equality-bitvec-arith.html b/archive/2022/divisions/qf-equality-bitvec-arith.html index 06be7943..1d1ec484 100644
    diff --git a/archive/2022/divisions/qf-equality-bitvec.html b/archive/2022/divisions/qf-equality-bitvec.html index b10be760..b3c8f0e6 100644
    diff --git a/archive/2022/divisions/qf-equality-lineararith.html b/archive/2022/divisions/qf-equality-lineararith.html index 97098557..eb5bb41f 100644
    diff --git a/archive/2022/divisions/qf-equality-nonlineararith.html b/archive/2022/divisions/qf-equality-nonlineararith.html index be690227..ade01385 100644
    diff --git a/archive/2022/divisions/qf-equality.html b/archive/2022/divisions/qf-equality.html index f32c1809..cb05088b 100644
    diff --git a/archive/2022/divisions/qf-fparith.html b/archive/2022/divisions/qf-fparith.html index bb79be22..d026422e 100644
    diff --git a/archive/2022/divisions/qf-linearintarith.html b/archive/2022/divisions/qf-linearintarith.html index 2d0413ce..dda0d98d 100644
    diff --git a/archive/2022/divisions/qf-linearrealarith.html b/archive/2022/divisions/qf-linearrealarith.html index 732c01b8..f04f0deb 100644
    diff --git a/archive/2022/divisions/qf-nonlinearintarith.html b/archive/2022/divisions/qf-nonlinearintarith.html index 2c4d43ff..22aa633b 100644
    diff --git a/archive/2022/divisions/qf-nonlinearrealarith.html b/archive/2022/divisions/qf-nonlinearrealarith.html index 7b29e0d4..14581b73 100644
    diff --git a/archive/2022/divisions/qf-strings.html b/archive/2022/divisions/qf-strings.html index 7cf97de0..851c7bac 100644
    [the same markup fix applies to the 2022 front page, the news posts, the parallel and cloud tracks page, and the participant index:]

    diff --git a/archive/2022/index.html b/archive/2022/index.html index 2dc3a643..c66cc1ae 100644
    diff --git a/archive/2022/news/2022-02-09-call-for-benchmark.html b/archive/2022/news/2022-02-09-call-for-benchmark.html index 33ae9942..a677be16 100644
    diff --git a/archive/2022/news/2022-02-22-call-for-comments.html b/archive/2022/news/2022-02-22-call-for-comments.html index 8eb694e6..390edc3c 100644
    diff --git a/archive/2022/news/2022-03-22-call-for-solvers.html b/archive/2022/news/2022-03-22-call-for-solvers.html index 59ba705b..64434b31 100644
    diff --git a/archive/2022/news/2022-06-01-final-call-for-solvers.html b/archive/2022/news/2022-06-01-final-call-for-solvers.html index 37f8dff6..b5977db3 100644
    diff --git a/archive/2022/news/2022-08-10-competition-results.html b/archive/2022/news/2022-08-10-competition-results.html index c2ba93fc..353fb557 100644
    diff --git a/archive/2022/parallel-and-cloud-tracks.html b/archive/2022/parallel-and-cloud-tracks.html index e0fda250..dcfc0cc9 100644
    diff --git a/archive/2022/participants.html b/archive/2022/participants.html index e73dd697..7d8e4b77 100644
    - + - diff --git a/archive/2022/participants/2018-mathsat-incremental.html b/archive/2022/participants/2018-mathsat-incremental.html index 343419f1..da115987 100644 --- a/archive/2022/participants/2018-mathsat-incremental.html +++ b/archive/2022/participants/2018-mathsat-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -144,8 +144,6 @@

    2018-MathSAT-incremental

    - + - - diff --git a/archive/2022/participants/2019-cvc4-inc.html b/archive/2022/participants/2019-cvc4-inc.html index 3a4489ae..0a4bf352 100644 --- a/archive/2022/participants/2019-cvc4-inc.html +++ b/archive/2022/participants/2019-cvc4-inc.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2019-CVC4-inc

    - + - - diff --git a/archive/2022/participants/2019-par4.html b/archive/2022/participants/2019-par4.html index 39604647..8813768e 100644 --- a/archive/2022/participants/2019-par4.html +++ b/archive/2022/participants/2019-par4.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2019-Par4

    - + - - diff --git a/archive/2022/participants/2019-z3.html b/archive/2022/participants/2019-z3.html index 22354a47..f3951aaa 100644 --- a/archive/2022/participants/2019-z3.html +++ b/archive/2022/participants/2019-z3.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2019-Z3

    - + - - diff --git a/archive/2022/participants/2020-bitwuzla-fixed.html b/archive/2022/participants/2020-bitwuzla-fixed.html index 1044cef5..1fd359e3 100644 --- a/archive/2022/participants/2020-bitwuzla-fixed.html +++ b/archive/2022/participants/2020-bitwuzla-fixed.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2020-Bitwuzla-fixed

    - + - - diff --git a/archive/2022/participants/2020-bitwuzla.html b/archive/2022/participants/2020-bitwuzla.html index 522b8df2..60e4286f 100644 --- a/archive/2022/participants/2020-bitwuzla.html +++ b/archive/2022/participants/2020-bitwuzla.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2020-Bitwuzla

    - + - - diff --git a/archive/2022/participants/2020-cvc4-uc.html b/archive/2022/participants/2020-cvc4-uc.html index dba1ea11..2bb3f1dc 100644 --- a/archive/2022/participants/2020-cvc4-uc.html +++ b/archive/2022/participants/2020-cvc4-uc.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2020-CVC4-uc

    - + - - diff --git a/archive/2022/participants/2020-cvc4.html b/archive/2022/participants/2020-cvc4.html index 544d1d73..82c0383a 100644 --- a/archive/2022/participants/2020-cvc4.html +++ b/archive/2022/participants/2020-cvc4.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2020-CVC4

    - + - - diff --git a/archive/2022/participants/2020-yices2-incremental.html b/archive/2022/participants/2020-yices2-incremental.html index ef0d15d2..f087b3ca 100644 --- a/archive/2022/participants/2020-yices2-incremental.html +++ b/archive/2022/participants/2020-yices2-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2020-Yices2 incremental

    - + - - diff --git a/archive/2022/participants/2020-yices2.html b/archive/2022/participants/2020-yices2.html index ee35def7..d9883d8e 100644 --- a/archive/2022/participants/2020-yices2.html +++ b/archive/2022/participants/2020-yices2.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2020-Yices2

    - + - - diff --git a/archive/2022/participants/2020-z3.html b/archive/2022/participants/2020-z3.html index 8ec9ea30..c21241db 100644 --- a/archive/2022/participants/2020-z3.html +++ b/archive/2022/participants/2020-z3.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2020-z3

    - + - - diff --git a/archive/2022/participants/2021-bitwuzla-fixed.html b/archive/2022/participants/2021-bitwuzla-fixed.html index b3e4bb4b..0fb9e4f8 100644 --- a/archive/2022/participants/2021-bitwuzla-fixed.html +++ b/archive/2022/participants/2021-bitwuzla-fixed.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-Bitwuzla - fixed

    - + - - diff --git a/archive/2022/participants/2021-bitwuzla.html b/archive/2022/participants/2021-bitwuzla.html index a5f42dca..9654222c 100644 --- a/archive/2022/participants/2021-bitwuzla.html +++ b/archive/2022/participants/2021-bitwuzla.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-Bitwuzla

    - + - - diff --git a/archive/2022/participants/2021-cvc5-inc.html b/archive/2022/participants/2021-cvc5-inc.html index 56aaaf5c..37205e7b 100644 --- a/archive/2022/participants/2021-cvc5-inc.html +++ b/archive/2022/participants/2021-cvc5-inc.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-cvc5-inc

    - + - - diff --git a/archive/2022/participants/2021-cvc5-uc.html b/archive/2022/participants/2021-cvc5-uc.html index 18baaea6..2354bcc8 100644 --- a/archive/2022/participants/2021-cvc5-uc.html +++ b/archive/2022/participants/2021-cvc5-uc.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-cvc5-uc

    - + - - diff --git a/archive/2022/participants/2021-cvc5.html b/archive/2022/participants/2021-cvc5.html index 68fbff70..0b631daa 100644 --- a/archive/2022/participants/2021-cvc5.html +++ b/archive/2022/participants/2021-cvc5.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-cvc5

    - + - - diff --git a/archive/2022/participants/2021-mathsat5.html b/archive/2022/participants/2021-mathsat5.html index a68950dc..0e8cddf0 100644 --- a/archive/2022/participants/2021-mathsat5.html +++ b/archive/2022/participants/2021-mathsat5.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-MathSAT5

    - + - - diff --git a/archive/2022/participants/2021-smtinterpol.html b/archive/2022/participants/2021-smtinterpol.html index efe3e959..9bdc8ead 100644 --- a/archive/2022/participants/2021-smtinterpol.html +++ b/archive/2022/participants/2021-smtinterpol.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-SMTInterpol

    - + - - diff --git a/archive/2022/participants/2021-stp.html b/archive/2022/participants/2021-stp.html index b3b02e78..83bb5081 100644 --- a/archive/2022/participants/2021-stp.html +++ b/archive/2022/participants/2021-stp.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-STP

    - + - - diff --git a/archive/2022/participants/2021-yices2-incremental.html b/archive/2022/participants/2021-yices2-incremental.html index 3d2d9faf..444505d3 100644 --- a/archive/2022/participants/2021-yices2-incremental.html +++ b/archive/2022/participants/2021-yices2-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-Yices2 incremental

    - + - - diff --git a/archive/2022/participants/2021-yices2-model-validation.html b/archive/2022/participants/2021-yices2-model-validation.html index c252f4ea..77b07e00 100644 --- a/archive/2022/participants/2021-yices2-model-validation.html +++ b/archive/2022/participants/2021-yices2-model-validation.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-Yices2 model-validation

    - + - - diff --git a/archive/2022/participants/2021-yices2.html b/archive/2022/participants/2021-yices2.html index 64da2be5..ce20ecb8 100644 --- a/archive/2022/participants/2021-yices2.html +++ b/archive/2022/participants/2021-yices2.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-Yices2

    - + - - diff --git a/archive/2022/participants/2021-z3.html b/archive/2022/participants/2021-z3.html index 8ef96375..cc6e02fa 100644 --- a/archive/2022/participants/2021-z3.html +++ b/archive/2022/participants/2021-z3.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-z3

    - + - - diff --git a/archive/2022/participants/bitwuzla.html b/archive/2022/participants/bitwuzla.html index c1793d55..099f0ab1 100644 --- a/archive/2022/participants/bitwuzla.html +++ b/archive/2022/participants/bitwuzla.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    Bitwuzla

    - + - - diff --git a/archive/2022/participants/colibri.html b/archive/2022/participants/colibri.html index 431b334d..9fc89b44 100644 --- a/archive/2022/participants/colibri.html +++ b/archive/2022/participants/colibri.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    COLIBRI

    - + - - diff --git a/archive/2022/participants/cvc5-cloud.html b/archive/2022/participants/cvc5-cloud.html index e6e05043..a1566755 100644 --- a/archive/2022/participants/cvc5-cloud.html +++ b/archive/2022/participants/cvc5-cloud.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    cvc5-cloud

    - + - - diff --git a/archive/2022/participants/cvc5-lfsc.html b/archive/2022/participants/cvc5-lfsc.html index 34b3bf0c..61eca5f1 100644 --- a/archive/2022/participants/cvc5-lfsc.html +++ b/archive/2022/participants/cvc5-lfsc.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    cvc5-lfsc

    - + - - diff --git a/archive/2022/participants/cvc5.html b/archive/2022/participants/cvc5.html index 80cf501b..ec0ca496 100644 --- a/archive/2022/participants/cvc5.html +++ b/archive/2022/participants/cvc5.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    cvc5

    - + - - diff --git a/archive/2022/participants/mathsat.html b/archive/2022/participants/mathsat.html index 8685a021..7475c7e4 100644 --- a/archive/2022/participants/mathsat.html +++ b/archive/2022/participants/mathsat.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    MathSAT

    - + - - diff --git a/archive/2022/participants/nra-ls.html b/archive/2022/participants/nra-ls.html index a9f7e8c9..ed6858d6 100644 --- a/archive/2022/participants/nra-ls.html +++ b/archive/2022/participants/nra-ls.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    NRA-LS

    - + - - diff --git a/archive/2022/participants/opensmt-fixed.html b/archive/2022/participants/opensmt-fixed.html index 0bf66f01..5141c361 100644 --- a/archive/2022/participants/opensmt-fixed.html +++ b/archive/2022/participants/opensmt-fixed.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    OpenSMT-fixed

    - + - - diff --git a/archive/2022/participants/opensmt.html b/archive/2022/participants/opensmt.html index 11f07e49..288f4efe 100644 --- a/archive/2022/participants/opensmt.html +++ b/archive/2022/participants/opensmt.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    OpenSMT

    - + - - diff --git a/archive/2022/participants/ostrich.html b/archive/2022/participants/ostrich.html index f5ffc0b1..e6428551 100644 --- a/archive/2022/participants/ostrich.html +++ b/archive/2022/participants/ostrich.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    OSTRICH

    - + - - diff --git a/archive/2022/participants/q3b-pbdd.html b/archive/2022/participants/q3b-pbdd.html index 9f47a1fa..15b643b8 100644 --- a/archive/2022/participants/q3b-pbdd.html +++ b/archive/2022/participants/q3b-pbdd.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    Q3B-pBDD

    - + - - diff --git a/archive/2022/participants/q3b.html b/archive/2022/participants/q3b.html index df9c15ec..6b78417f 100644 --- a/archive/2022/participants/q3b.html +++ b/archive/2022/participants/q3b.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    Q3B

    - + - - diff --git a/archive/2022/participants/smt-rat-mcsat-22-06.html b/archive/2022/participants/smt-rat-mcsat-22-06.html index 06408a7e..a689a1dc 100644 --- a/archive/2022/participants/smt-rat-mcsat-22-06.html +++ b/archive/2022/participants/smt-rat-mcsat-22-06.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    SMT-RAT-MCSAT 22.06

    - + - - diff --git a/archive/2022/participants/smtinterpol-fixed.html b/archive/2022/participants/smtinterpol-fixed.html index 3924bb40..3512d881 100644 --- a/archive/2022/participants/smtinterpol-fixed.html +++ b/archive/2022/participants/smtinterpol-fixed.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    smtinterpol-fixed

    - + - - diff --git a/archive/2022/participants/smtinterpol.html b/archive/2022/participants/smtinterpol.html index dfeff5d1..a713c309 100644 --- a/archive/2022/participants/smtinterpol.html +++ b/archive/2022/participants/smtinterpol.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    smtinterpol

    - + - - diff --git a/archive/2022/participants/smts-cube-and-conquer.html b/archive/2022/participants/smts-cube-and-conquer.html index eecb27e0..bdaddbc4 100644 --- a/archive/2022/participants/smts-cube-and-conquer.html +++ b/archive/2022/participants/smts-cube-and-conquer.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    SMTS cube-and-conquer

    - + - - diff --git a/archive/2022/participants/smts-portfolio.html b/archive/2022/participants/smts-portfolio.html index 0bb17a28..77c9995f 100644 --- a/archive/2022/participants/smts-portfolio.html +++ b/archive/2022/participants/smts-portfolio.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    SMTS portfolio

    - + - - diff --git a/archive/2022/participants/solsmt.html b/archive/2022/participants/solsmt.html index 65becaae..64e1638f 100644 --- a/archive/2022/participants/solsmt.html +++ b/archive/2022/participants/solsmt.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    solsmt

    - + - - diff --git a/archive/2022/participants/stp-fixed.html b/archive/2022/participants/stp-fixed.html index 9ad04dab..838bdd79 100644 --- a/archive/2022/participants/stp-fixed.html +++ b/archive/2022/participants/stp-fixed.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    STP-fixed

    - + - - diff --git a/archive/2022/participants/stp.html b/archive/2022/participants/stp.html index d56ae576..7a39c6fd 100644 --- a/archive/2022/participants/stp.html +++ b/archive/2022/participants/stp.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    STP

    - + - - diff --git a/archive/2022/participants/ultimateeliminator-mathsat.html b/archive/2022/participants/ultimateeliminator-mathsat.html index 0c7c9da8..24b864ba 100644 --- a/archive/2022/participants/ultimateeliminator-mathsat.html +++ b/archive/2022/participants/ultimateeliminator-mathsat.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    UltimateEliminator+MathSAT

    - + - - diff --git a/archive/2022/participants/vampire.html b/archive/2022/participants/vampire.html index cd657cc1..7fb8039f 100644 --- a/archive/2022/participants/vampire.html +++ b/archive/2022/participants/vampire.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    Vampire

    - + - - diff --git a/archive/2022/participants/verit-rasat-redlog.html b/archive/2022/participants/verit-rasat-redlog.html index ed961a46..94294a2d 100644 --- a/archive/2022/participants/verit-rasat-redlog.html +++ b/archive/2022/participants/verit-rasat-redlog.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    veriT+raSAT+Redlog

    - + - - diff --git a/archive/2022/participants/verit.html b/archive/2022/participants/verit.html index 49bdd075..2e0e88ab 100644 --- a/archive/2022/participants/verit.html +++ b/archive/2022/participants/verit.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    veriT

    - + - - diff --git a/archive/2022/participants/yices-ismt-fixed.html b/archive/2022/participants/yices-ismt-fixed.html index 58d64427..74df6e87 100644 --- a/archive/2022/participants/yices-ismt-fixed.html +++ b/archive/2022/participants/yices-ismt-fixed.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    Yices-ismt-fixed

    - + - - diff --git a/archive/2022/participants/yices-ismt.html b/archive/2022/participants/yices-ismt.html index 25573088..3549f3f2 100644 --- a/archive/2022/participants/yices-ismt.html +++ b/archive/2022/participants/yices-ismt.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    Yices-ismt

    - + - - diff --git a/archive/2022/participants/yices2.html b/archive/2022/participants/yices2.html index 616598e0..da920b80 100644 --- a/archive/2022/participants/yices2.html +++ b/archive/2022/participants/yices2.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    Yices2

    - + - - diff --git a/archive/2022/participants/yicesqs.html b/archive/2022/participants/yicesqs.html index 1fb07d5d..5f59c5c0 100644 --- a/archive/2022/participants/yicesqs.html +++ b/archive/2022/participants/yicesqs.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    YicesQS

    - + - - diff --git a/archive/2022/participants/z3-4-8-17.html b/archive/2022/participants/z3-4-8-17.html index a800d938..78824022 100644 --- a/archive/2022/participants/z3-4-8-17.html +++ b/archive/2022/participants/z3-4-8-17.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    z3-4.8.17

    - + - - diff --git a/archive/2022/participants/z3-bv.html b/archive/2022/participants/z3-bv.html index 7a1736fe..bf4d15ae 100644 --- a/archive/2022/participants/z3-bv.html +++ b/archive/2022/participants/z3-bv.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    Z3++BV

    - + - - diff --git a/archive/2022/participants/z3-fixed.html b/archive/2022/participants/z3-fixed.html index 31ecee18..54cd6294 100644 --- a/archive/2022/participants/z3-fixed.html +++ b/archive/2022/participants/z3-fixed.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    Z3++-fixed

    - + - - diff --git a/archive/2022/participants/z3.html b/archive/2022/participants/z3.html index 1d128b54..c658d4a2 100644 --- a/archive/2022/participants/z3.html +++ b/archive/2022/participants/z3.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    Z3++

    - + - - diff --git a/archive/2022/participants/z3str4.html b/archive/2022/participants/z3str4.html index 206b086a..f438f48f 100644 --- a/archive/2022/participants/z3str4.html +++ b/archive/2022/participants/z3str4.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    Z3str4

diff --git a/archive/2022/proof-track.html b/archive/2022/proof-track.html
index 651b0b3a..1af7dfb4 100644
[hunks garbled in extraction: same header/navigation/footer regeneration on the Proof Exhibition Track page, plus a one-line deletion near the "Key-Value Pairs" section (@@ -173,7 +173,6 @@)]
diff --git a/archive/2022/results.html b/archive/2022/results.html
index ca4206ca..5ef6cbe5 100644
[hunks garbled in extraction: same header/navigation/footer regeneration; a 261-line region covering the "Tracks Summary" and "Divisions" tables is rewritten line-for-line (@@ -142,261 +142,261 @@), and one line is deleted after the second "Divisions" heading (@@ -411,7 +411,6 @@)]
[hunks garbled in extraction — each results page below receives the same header/navigation/footer regeneration plus one-line markup substitutions throughout its result tables; the benchmark counts and scores appear only as unchanged context:]

diff --git a/archive/2022/results/abv-proof-exhibition.html b/archive/2022/results/abv-proof-exhibition.html
index 105a35e8..41a15275 100644
diff --git a/archive/2022/results/abv-single-query.html b/archive/2022/results/abv-single-query.html
index 08efe9c7..c8e8b3df 100644
diff --git a/archive/2022/results/abv-unsat-core.html b/archive/2022/results/abv-unsat-core.html
index 0ddebec6..585ddeb7 100644
diff --git a/archive/2022/results/abvfp-proof-exhibition.html b/archive/2022/results/abvfp-proof-exhibition.html
index 204cda31..c92aa34c 100644
diff --git a/archive/2022/results/abvfp-single-query.html b/archive/2022/results/abvfp-single-query.html
index fca5cc4b..91f78dcc 100644
diff --git a/archive/2022/results/abvfp-unsat-core.html b/archive/2022/results/abvfp-unsat-core.html
index 7ae89cb0..b0c358f8 100644
[hunks garbled in extraction — same treatment as the results pages above:]

diff --git a/archive/2022/results/abvfplra-incremental.html b/archive/2022/results/abvfplra-incremental.html
index 93ddc244..a32809f1 100644
diff --git a/archive/2022/results/abvfplra-proof-exhibition.html b/archive/2022/results/abvfplra-proof-exhibition.html
index 3c60c7c7..a4044ca1 100644
diff --git a/archive/2022/results/abvfplra-single-query.html b/archive/2022/results/abvfplra-single-query.html
index 1a242239..26d12b4b 100644
diff --git a/archive/2022/results/abvfplra-unsat-core.html b/archive/2022/results/abvfplra-unsat-core.html
index bc0c0a5e..f3dfa2dc 100644
[hunks garbled in extraction — same treatment:]

diff --git a/archive/2022/results/alia-incremental.html b/archive/2022/results/alia-incremental.html
index b08d5fbe..f4d3d3c9 100644
diff --git a/archive/2022/results/alia-proof-exhibition.html b/archive/2022/results/alia-proof-exhibition.html
index 80fb2bfc..dd79040c 100644
diff --git a/archive/2022/results/alia-single-query.html b/archive/2022/results/alia-single-query.html
index c0105513..4e1c652e 100644
diff --git a/archive/2022/results/ania-incremental.html b/archive/2022/results/ania-incremental.html
index d045f58b..78804316 100644
[hunks garbled in extraction — same treatment; these are the Arith division pages:]

diff --git a/archive/2022/results/arith-cloud.html b/archive/2022/results/arith-cloud.html
index a4f4e479..34793d53 100644
diff --git a/archive/2022/results/arith-incremental.html b/archive/2022/results/arith-incremental.html
index 2bddefff..11685c26 100644
diff --git a/archive/2022/results/arith-parallel.html b/archive/2022/results/arith-parallel.html
index 6628ac8d..8eef57e1 100644
diff --git a/archive/2022/results/arith-proof-exhibition.html b/archive/2022/results/arith-proof-exhibition.html
index 02896cc0..3938f6da 100644
diff --git a/archive/2022/results/arith-single-query.html b/archive/2022/results/arith-single-query.html
index 82a9afeb..9a7f5eec 100644
diff --git a/archive/2022/results/arith-unsat-core.html b/archive/2022/results/arith-unsat-core.html
index 81bdb744..9967d7f2 100644
[hunks garbled in extraction — same treatment:]

diff --git a/archive/2022/results/aufbv-proof-exhibition.html b/archive/2022/results/aufbv-proof-exhibition.html
index 8ef85806..f367807a 100644
diff --git a/archive/2022/results/aufbv-single-query.html b/archive/2022/results/aufbv-single-query.html
index 61127379..1a39d7c9 100644
[hunks garbled in extraction — same treatment:]

diff --git a/archive/2022/results/aufbvdtlia-proof-exhibition.html b/archive/2022/results/aufbvdtlia-proof-exhibition.html
index 57fdd912..6dd150da 100644
diff --git a/archive/2022/results/aufbvdtlia-single-query.html b/archive/2022/results/aufbvdtlia-single-query.html
index 2579d990..2b1ef336 100644
diff --git a/archive/2022/results/aufbvdtlia-unsat-core.html b/archive/2022/results/aufbvdtlia-unsat-core.html
index fa24f9f7..de1bf59b 100644
diff --git a/archive/2022/results/aufbvdtnia-proof-exhibition.html b/archive/2022/results/aufbvdtnia-proof-exhibition.html
index 6e2d80a4..8e750a33 100644
--- a/archive/2022/results/aufbvdtnia-proof-exhibition.html
+++ b/archive/2022/results/aufbvdtnia-proof-exhibition.html
[formatting-only hunks: AUFBVDTNIA (Proof Exhibition Track) results page]
diff --git a/archive/2022/results/aufbvdtnia-single-query.html b/archive/2022/results/aufbvdtnia-single-query.html
index a6544598..c8a2f904 100644
--- a/archive/2022/results/aufbvdtnia-single-query.html
+++ b/archive/2022/results/aufbvdtnia-single-query.html
[formatting-only hunks: AUFBVDTNIA (Single Query Track) results page]
diff --git a/archive/2022/results/aufbvdtnia-unsat-core.html b/archive/2022/results/aufbvdtnia-unsat-core.html
index a1e6f7ff..9db0f91e 100644
--- a/archive/2022/results/aufbvdtnia-unsat-core.html
+++ b/archive/2022/results/aufbvdtnia-unsat-core.html
[formatting-only hunks: AUFBVDTNIA (Unsat Core Track) results page]
diff --git a/archive/2022/results/aufbvdtnira-proof-exhibition.html b/archive/2022/results/aufbvdtnira-proof-exhibition.html
index 05f3db63..9ff06d1a 100644
--- a/archive/2022/results/aufbvdtnira-proof-exhibition.html
+++ b/archive/2022/results/aufbvdtnira-proof-exhibition.html
[formatting-only hunks: AUFBVDTNIRA (Proof Exhibition Track) results page]
diff --git a/archive/2022/results/aufbvdtnira-single-query.html b/archive/2022/results/aufbvdtnira-single-query.html
index 51ff1e90..9564e4ce 100644
--- a/archive/2022/results/aufbvdtnira-single-query.html
+++ b/archive/2022/results/aufbvdtnira-single-query.html
[formatting-only hunks: AUFBVDTNIRA (Single Query Track) results page]
diff --git a/archive/2022/results/aufbvdtnira-unsat-core.html b/archive/2022/results/aufbvdtnira-unsat-core.html
index de848791..fee9edac 100644
--- a/archive/2022/results/aufbvdtnira-unsat-core.html
+++ b/archive/2022/results/aufbvdtnira-unsat-core.html
[formatting-only hunks: AUFBVDTNIRA (Unsat Core Track) results page]
diff --git a/archive/2022/results/aufbvfp-proof-exhibition.html b/archive/2022/results/aufbvfp-proof-exhibition.html
index 6f008367..1f972fb2 100644
--- a/archive/2022/results/aufbvfp-proof-exhibition.html
+++ b/archive/2022/results/aufbvfp-proof-exhibition.html
[formatting-only hunks: AUFBVFP (Proof Exhibition Track) results page]
diff --git a/archive/2022/results/aufbvfp-single-query.html b/archive/2022/results/aufbvfp-single-query.html
index 34fd9306..07945248 100644
--- a/archive/2022/results/aufbvfp-single-query.html
+++ b/archive/2022/results/aufbvfp-single-query.html
[formatting-only hunks: AUFBVFP (Single Query Track) results page]
diff --git a/archive/2022/results/aufdtlia-proof-exhibition.html b/archive/2022/results/aufdtlia-proof-exhibition.html
index 871daa23..3115be30 100644
--- a/archive/2022/results/aufdtlia-proof-exhibition.html
+++ b/archive/2022/results/aufdtlia-proof-exhibition.html
[formatting-only hunks: AUFDTLIA (Proof Exhibition Track) results page]
diff --git a/archive/2022/results/aufdtlia-single-query.html b/archive/2022/results/aufdtlia-single-query.html
index d581730f..b94f80d2 100644
--- a/archive/2022/results/aufdtlia-single-query.html
+++ b/archive/2022/results/aufdtlia-single-query.html
[formatting-only hunks: AUFDTLIA (Single Query Track) results page]
diff --git a/archive/2022/results/aufdtlia-unsat-core.html b/archive/2022/results/aufdtlia-unsat-core.html
index 745d0e5d..1a033a63 100644
--- a/archive/2022/results/aufdtlia-unsat-core.html
+++ b/archive/2022/results/aufdtlia-unsat-core.html
[formatting-only hunks: AUFDTLIA (Unsat Core Track) results page]
diff --git a/archive/2022/results/aufdtlira-cloud.html b/archive/2022/results/aufdtlira-cloud.html
index 19bc94d5..78042e0e 100644
--- a/archive/2022/results/aufdtlira-cloud.html
+++ b/archive/2022/results/aufdtlira-cloud.html
[formatting-only hunks: AUFDTLIRA (Cloud Track) results page]
diff --git a/archive/2022/results/aufdtlira-parallel.html b/archive/2022/results/aufdtlira-parallel.html
index 625d651c..301856f4 100644
--- a/archive/2022/results/aufdtlira-parallel.html
+++ b/archive/2022/results/aufdtlira-parallel.html
[formatting-only hunks: AUFDTLIRA (Parallel Track) results page]
diff --git a/archive/2022/results/aufdtlira-proof-exhibition.html b/archive/2022/results/aufdtlira-proof-exhibition.html
index 9770cef6..02ce630c 100644
--- a/archive/2022/results/aufdtlira-proof-exhibition.html
+++ b/archive/2022/results/aufdtlira-proof-exhibition.html
[formatting-only hunks: AUFDTLIRA (Proof Exhibition Track) results page]
diff --git a/archive/2022/results/aufdtlira-single-query.html b/archive/2022/results/aufdtlira-single-query.html
index 7eaa8183..f3aad874 100644
--- a/archive/2022/results/aufdtlira-single-query.html
+++ b/archive/2022/results/aufdtlira-single-query.html
[formatting-only hunks: AUFDTLIRA (Single Query Track) results page]
diff --git a/archive/2022/results/aufdtlira-unsat-core.html b/archive/2022/results/aufdtlira-unsat-core.html
index e0bdbc04..c84e680a 100644
--- a/archive/2022/results/aufdtlira-unsat-core.html
+++ b/archive/2022/results/aufdtlira-unsat-core.html
[formatting-only hunks: AUFDTLIRA (Unsat Core Track) results page]
diff --git a/archive/2022/results/aufdtnira-cloud.html b/archive/2022/results/aufdtnira-cloud.html
index 19573031..39f8d79e 100644
--- a/archive/2022/results/aufdtnira-cloud.html
+++ b/archive/2022/results/aufdtnira-cloud.html
[formatting-only hunks: AUFDTNIRA (Cloud Track) results page]
diff --git a/archive/2022/results/aufdtnira-parallel.html b/archive/2022/results/aufdtnira-parallel.html
index c6dfbbe4..c0beaa13 100644
--- a/archive/2022/results/aufdtnira-parallel.html
+++ b/archive/2022/results/aufdtnira-parallel.html
[formatting-only hunks: AUFDTNIRA (Parallel Track) results page]
diff --git a/archive/2022/results/aufdtnira-proof-exhibition.html b/archive/2022/results/aufdtnira-proof-exhibition.html
index 28650313..8b9193fa 100644
--- a/archive/2022/results/aufdtnira-proof-exhibition.html
+++ b/archive/2022/results/aufdtnira-proof-exhibition.html
[formatting-only hunks: AUFDTNIRA (Proof Exhibition Track) results page]
diff --git a/archive/2022/results/aufdtnira-single-query.html b/archive/2022/results/aufdtnira-single-query.html
index 4f17e1c8..d9414b49 100644
--- a/archive/2022/results/aufdtnira-single-query.html
+++ b/archive/2022/results/aufdtnira-single-query.html
[formatting-only hunks: AUFDTNIRA (Single Query Track) results page]
diff --git a/archive/2022/results/aufdtnira-unsat-core.html b/archive/2022/results/aufdtnira-unsat-core.html
index 900d595d..187e5107 100644
--- a/archive/2022/results/aufdtnira-unsat-core.html
+++ b/archive/2022/results/aufdtnira-unsat-core.html
[formatting-only hunks: AUFDTNIRA (Unsat Core Track) results page]
diff --git a/archive/2022/results/auffpdtnira-proof-exhibition.html b/archive/2022/results/auffpdtnira-proof-exhibition.html
index 0e206c5d..110c7f82 100644
--- a/archive/2022/results/auffpdtnira-proof-exhibition.html
+++ b/archive/2022/results/auffpdtnira-proof-exhibition.html
[formatting-only hunks: AUFFPDTNIRA (Proof Exhibition Track) results page]
diff --git a/archive/2022/results/auffpdtnira-single-query.html b/archive/2022/results/auffpdtnira-single-query.html
index d98401ee..76ff208c 100644
--- a/archive/2022/results/auffpdtnira-single-query.html
+++ b/archive/2022/results/auffpdtnira-single-query.html
[formatting-only hunks: AUFFPDTNIRA (Single Query Track) results page]
diff --git a/archive/2022/results/auffpdtnira-unsat-core.html b/archive/2022/results/auffpdtnira-unsat-core.html
index d395d942..1d162599 100644
--- a/archive/2022/results/auffpdtnira-unsat-core.html
+++ b/archive/2022/results/auffpdtnira-unsat-core.html
[formatting-only hunks: AUFFPDTNIRA (Unsat Core Track) results page]
diff --git a/archive/2022/results/auflia-cloud.html b/archive/2022/results/auflia-cloud.html
index 9b8ce4af..9d60f4fe 100644
--- a/archive/2022/results/auflia-cloud.html
+++ b/archive/2022/results/auflia-cloud.html
[formatting-only hunks: AUFLIA (Cloud Track) results page]
diff --git a/archive/2022/results/auflia-parallel.html b/archive/2022/results/auflia-parallel.html
index 91ba4e81..8b29fdf5 100644
--- a/archive/2022/results/auflia-parallel.html
+++ b/archive/2022/results/auflia-parallel.html
[formatting-only hunks: AUFLIA (Parallel Track) results page]
diff --git a/archive/2022/results/auflia-proof-exhibition.html b/archive/2022/results/auflia-proof-exhibition.html
index 484c7db7..fa747e09 100644
--- a/archive/2022/results/auflia-proof-exhibition.html
+++ b/archive/2022/results/auflia-proof-exhibition.html
[formatting-only hunks: AUFLIA (Proof Exhibition Track) results page]
diff --git a/archive/2022/results/auflia-single-query.html b/archive/2022/results/auflia-single-query.html
index 0a9dcce6..b0beed18 100644
--- a/archive/2022/results/auflia-single-query.html
+++ b/archive/2022/results/auflia-single-query.html
[formatting-only hunks: AUFLIA (Single Query Track) results page]
diff --git a/archive/2022/results/auflia-unsat-core.html b/archive/2022/results/auflia-unsat-core.html
index 97e62d7f..850e69cd 100644
--- a/archive/2022/results/auflia-unsat-core.html
+++ b/archive/2022/results/auflia-unsat-core.html

    AUFLIA (Unsat Core Track)

    Competition results for the AUFLIA logic in the Unsat Core Track.

    Winners — Sequential Performance: cvc5; Parallel Performance: cvc5.

    Ranking (solver, error score, correct score): cvc5 0 34520; 2021-cvc5-ucn 0 34430; z3-4.8.17n 0 33464; Vampire 0 30276; smtinterpol 0 28686; UltimateEliminator+MathSAT 3 599.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/auflira-cloud.html b/archive/2022/results/auflira-cloud.html
index 1dcbd0a0..396ab6b9 100644
--- a/archive/2022/results/auflira-cloud.html
+++ b/archive/2022/results/auflira-cloud.html

    AUFLIRA (Cloud Track)

    Competition results for the AUFLIRA logic in the Cloud Track.

    Results: cvc5-cloud and Vampire each solved 0 of the 7 benchmarks.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/auflira-parallel.html b/archive/2022/results/auflira-parallel.html
index 0761ab77..2b9d00c6 100644
--- a/archive/2022/results/auflira-parallel.html
+++ b/archive/2022/results/auflira-parallel.html

    AUFLIRA (Parallel Track)

    Competition results for the AUFLIRA logic in the Parallel Track.

    Results: Vampire, the only entrant shown, solved 0 of the 7 benchmarks.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/auflira-proof-exhibition.html b/archive/2022/results/auflira-proof-exhibition.html
index 880c3295..82dfb69e 100644
--- a/archive/2022/results/auflira-proof-exhibition.html
+++ b/archive/2022/results/auflira-proof-exhibition.html

    AUFLIRA (Proof Exhibition Track)

    Competition results for the AUFLIRA logic in the Proof Exhibition Track.

    Ranking (solver, error score, correct score): cvc5-lfsc 0 9849; cvc5 0 9805; veriT 0 9660; smtinterpol 0 9536.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/auflira-single-query.html b/archive/2022/results/auflira-single-query.html
index 630583c6..23937695 100644
--- a/archive/2022/results/auflira-single-query.html
+++ b/archive/2022/results/auflira-single-query.html

    AUFLIRA (Single Query Track)

    Competition results for the AUFLIRA logic in the Single Query Track.

    Winners — Sequential Performance: cvc5; Parallel Performance: cvc5; SAT Performance (parallel): —; UNSAT Performance (parallel): cvc5; 24s Performance (parallel): Vampire.

    Ranking (solver, error score, correct score): z3-4.8.17n 0 1567; cvc5 0 1563; 2020-CVC4n 0 1561; Vampire 0 1517; veriT 0 1342; smtinterpol 0 1335; smtinterpol-fixedn 0 1305; UltimateEliminator+MathSAT 0 9.

    [sequential/parallel, SAT, UNSAT, and 24s performance tables omitted: their CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/auflira-unsat-core.html b/archive/2022/results/auflira-unsat-core.html
index 25d3c5fc..604cfaca 100644
--- a/archive/2022/results/auflira-unsat-core.html
+++ b/archive/2022/results/auflira-unsat-core.html

    AUFLIRA (Unsat Core Track)

    Competition results for the AUFLIRA logic in the Unsat Core Track.

    Winners — Sequential Performance: cvc5; Parallel Performance: cvc5.

    Ranking (solver, error score, correct score): 2021-cvc5-ucn 0 181102; z3-4.8.17n 0 181071; cvc5 0 180991; Vampire 0 172332; smtinterpol 0 155904; UltimateEliminator+MathSAT 0 6786.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/aufnia-proof-exhibition.html b/archive/2022/results/aufnia-proof-exhibition.html
index 9d936351..96398e3e 100644
--- a/archive/2022/results/aufnia-proof-exhibition.html
+++ b/archive/2022/results/aufnia-proof-exhibition.html

    AUFNIA (Proof Exhibition Track)

    Competition results for the AUFNIA logic in the Proof Exhibition Track.

    Ranking (solver, error score, correct score): cvc5-lfsc 0 0; cvc5 0 0 — neither entrant produced a checked proof for any of the 3 benchmarks.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/aufnia-single-query.html b/archive/2022/results/aufnia-single-query.html
index 6e19ae2b..58c08877 100644
--- a/archive/2022/results/aufnia-single-query.html
+++ b/archive/2022/results/aufnia-single-query.html

    AUFNIA (Single Query Track)

    Competition results for the AUFNIA logic in the Single Query Track.

    Winners: none declared in any performance category (all entries "—").

    Ranking: all entrants — UltimateEliminator+MathSAT, 2020-CVC4n, cvc5, z3-4.8.17n, Vampire — solved 0 of the 3 benchmarks.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/aufnia-unsat-core.html b/archive/2022/results/aufnia-unsat-core.html
index fdf24e20..7f47c88d 100644
--- a/archive/2022/results/aufnia-unsat-core.html
+++ b/archive/2022/results/aufnia-unsat-core.html

    AUFNIA (Unsat Core Track)

    Competition results for the AUFNIA logic in the Unsat Core Track.

    Winners: none declared (all entries "—").

    Ranking: all entrants — UltimateEliminator+MathSAT, z3-4.8.17n, 2020-CVC4-ucn, cvc5, Vampire — scored 0 on the 3 benchmarks.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/aufnira-cloud.html b/archive/2022/results/aufnira-cloud.html
index 069e592b..7953fc98 100644
--- a/archive/2022/results/aufnira-cloud.html
+++ b/archive/2022/results/aufnira-cloud.html

    AUFNIRA (Cloud Track)

    Competition results for the AUFNIRA logic in the Cloud Track.

    Results (solver, error score, solved): Vampire 0 4; cvc5-cloud 1 0 — out of 38 benchmarks.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/aufnira-incremental.html b/archive/2022/results/aufnira-incremental.html
index b16397bd..f54ea1ba 100644
--- a/archive/2022/results/aufnira-incremental.html
+++ b/archive/2022/results/aufnira-incremental.html

    AUFNIRA (Incremental Track)

    Competition results for the AUFNIRA logic in the Incremental Track.

    Winner — Parallel Performance: cvc5.

    Correct answers (read back from the fused numeric columns): cvc5 3092; 2020-z3n 2829; z3-4.8.17n 2731; smtinterpol 0; UltimateEliminator+MathSAT 0.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/aufnira-parallel.html b/archive/2022/results/aufnira-parallel.html
index 7d25056d..d8591135 100644
--- a/archive/2022/results/aufnira-parallel.html
+++ b/archive/2022/results/aufnira-parallel.html

    AUFNIRA (Parallel Track)

    Competition results for the AUFNIRA logic in the Parallel Track.

    Results: Vampire, the only entrant shown, solved 5 of the 38 benchmarks.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/aufnira-proof-exhibition.html b/archive/2022/results/aufnira-proof-exhibition.html
index a20f47c3..63d6f8c7 100644
--- a/archive/2022/results/aufnira-proof-exhibition.html
+++ b/archive/2022/results/aufnira-proof-exhibition.html

    AUFNIRA (Proof Exhibition Track)

    Competition results for the AUFNIRA logic in the Proof Exhibition Track.

    Ranking (solver, error score, correct score): cvc5 0 519; cvc5-lfsc 0 519.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/aufnira-single-query.html b/archive/2022/results/aufnira-single-query.html
index 5e077199..86cbbaa1 100644
--- a/archive/2022/results/aufnira-single-query.html
+++ b/archive/2022/results/aufnira-single-query.html

    AUFNIRA (Single Query Track)

    Competition results for the AUFNIRA logic in the Single Query Track.

    Winners — Sequential Performance: Vampire; Parallel Performance: Vampire; SAT Performance (parallel): cvc5; UNSAT Performance (parallel): Vampire; 24s Performance (parallel): Vampire.

    Ranking (solver, error score, correct score): 2020-CVC4n 0 56; Vampire 0 49; cvc5 0 47; z3-4.8.17n 0 32; UltimateEliminator+MathSAT 0 0.

    [sequential/parallel, SAT, UNSAT, and 24s performance tables omitted: their CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/aufnira-unsat-core.html b/archive/2022/results/aufnira-unsat-core.html
index af356237..b88ab3a0 100644
--- a/archive/2022/results/aufnira-unsat-core.html
+++ b/archive/2022/results/aufnira-unsat-core.html

    AUFNIRA (Unsat Core Track)

    Competition results for the AUFNIRA logic in the Unsat Core Track.

    Winners — Sequential Performance: Vampire; Parallel Performance: Vampire.

    Ranking (solver, error score, correct score): Vampire 0 17761; 2020-CVC4-ucn 0 17040; cvc5 0 17001; z3-4.8.17n 0 14899; UltimateEliminator+MathSAT 0 832.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/biggest-lead-incremental.html b/archive/2022/results/biggest-lead-incremental.html
index be579b39..201845ca 100644
--- a/archive/2022/results/biggest-lead-incremental.html
+++ b/archive/2022/results/biggest-lead-incremental.html

    Biggest Lead (Incremental Track)

    Winner — Parallel Performance: smtinterpol.

    Parallel Performance leads, one per division (division names were lost when the markup was stripped): smtinterpol 2.48905696; Yices2 1.85578631; cvc5 1.48916592; cvc5 1.20828761; Yices2 1.19119048; smtinterpol 1.11456386; Bitwuzla 1.11455789; Bitwuzla 1.06015504; cvc5 1.05437693; smtinterpol 1.04804128; cvc5 1.0241521; Yices2 1.00493061; OpenSMT 1.00332226; UltimateEliminator+MathSAT 1.0; Yices2 1.0; cvc5 1.0.
diff --git a/archive/2022/results/biggest-lead-model-validation.html b/archive/2022/results/biggest-lead-model-validation.html
index 974b2875..d05cf57b 100644
--- a/archive/2022/results/biggest-lead-model-validation.html
+++ b/archive/2022/results/biggest-lead-model-validation.html

    Biggest Lead (Model Validation Track)

    Winners — Sequential Performance: Z3++; Parallel Performance: Z3++.

    Sequential Performance leads, one per division (division names were lost when the markup was stripped): Z3++ 1.0374646; smtinterpol 1.03161593; OpenSMT 1.00830565; Bitwuzla 1.00358769; Bitwuzla 1.00133164; Yices2 1.0; Bitwuzla 1.0.

    Parallel Performance leads: identical solvers and values as the sequential list.
diff --git a/archive/2022/results/biggest-lead-single-query.html b/archive/2022/results/biggest-lead-single-query.html
index eec1d4c6..3025643b 100644
--- a/archive/2022/results/biggest-lead-single-query.html
+++ b/archive/2022/results/biggest-lead-single-query.html

    Biggest Lead (Single Query Track)

    Winners — Sequential Performance: cvc5; Parallel Performance: cvc5; SAT Performance (parallel): cvc5; UNSAT Performance (parallel): cvc5; 24 seconds Performance (parallel): cvc5.

    Leads below are one per division; division names were lost when the markup was stripped.

    Sequential Performance: cvc5 10.61111111; cvc5 3.26117647; cvc5 1.32388664; cvc5 1.27700312; cvc5 1.13517034; cvc5 1.10313316; Bitwuzla 1.06714944; cvc5 1.05595829; cvc5 1.04280973; cvc5 1.0375; cvc5 1.02641056; cvc5 1.02290076; Bitwuzla 1.01827802; Bitwuzla 1.00699301; Yices2 1.00670241; Bitwuzla 1.00658216; OpenSMT 1.00337937; smtinterpol 1.00115075; Yices2 1.00026364.

    Parallel Performance: cvc5 6.82142857; cvc5 3.26117647; cvc5 1.32388664; cvc5 1.19746341; cvc5 1.10667188; Bitwuzla 1.06714944; cvc5 1.05608546; cvc5 1.04280973; cvc5 1.04192355; cvc5 1.0375; cvc5 1.02290076; cvc5 1.02272727; Bitwuzla 1.01827802; Bitwuzla 1.00699301; Bitwuzla 1.00670486; Yices2 1.00670241; OpenSMT 1.00337937; smtinterpol 1.00287687; Yices2 1.00026364.

    SAT Performance: cvc5 52.0; cvc5 15.18181818; cvc5 6.95412844; cvc5 1.74623116; cvc5 1.24102564; cvc5 1.13467947; Bitwuzla 1.07730673; Z3++ 1.07148594; cvc5 1.05532787; cvc5 1.046875; Z3++ 1.03947368; smtinterpol 1.03559871; YicesQS 1.02893309; Yices2 1.01415094; cvc5 1.01310044; Bitwuzla 1.0102209; Bitwuzla 1.00380807; Yices2 1.0; Bitwuzla 1.0.

    UNSAT Performance: cvc5 5.0; cvc5 2.60643564; cvc5 1.62264151; Z3++ 1.12342216; cvc5 1.08459596; Bitwuzla 1.06344951; cvc5 1.06204878; cvc5 1.04373581; Bitwuzla 1.04302477; cvc5 1.03612335; cvc5 1.03301016; cvc5 1.02631579; Yices2 1.02138365; cvc5 1.01242236; Bitwuzla 1.01135074; cvc5 1.00852053; STP 1.00192493; cvc5 1.00160128; Yices2 1.00044783.

    24s Performance: cvc5 3.19169329; smtinterpol 2.0; cvc5 1.57346939; cvc5 1.28444444; Vampire 1.28359788; cvc5 1.23645833; Yices2 1.16805959; Q3B 1.12150838; Bitwuzla 1.0908414; Bitwuzla 1.07785888; cvc5 1.06565513; Yices2 1.05067568; YicesQS 1.04996097; Yices2 1.01808318; Yices2 1.01336541; Bitwuzla 1.00824742; STP 1.00712965; cvc5 1.00575476; Yices2 1.0.
diff --git a/archive/2022/results/biggest-lead-unsat-core.html b/archive/2022/results/biggest-lead-unsat-core.html
index 1a34e720..d48e48e1 100644
--- a/archive/2022/results/biggest-lead-unsat-core.html
+++ b/archive/2022/results/biggest-lead-unsat-core.html

    Biggest Lead (Unsat Core Track)

    Winners — Sequential Performance: cvc5; Parallel Performance: cvc5.

    Leads below are one per division; division names were lost when the markup was stripped.

    Sequential Performance: cvc5 49432.0; cvc5 48.0; cvc5 23.90909091; cvc5 4.7918964; Yices2 4.38530866; smtinterpol 2.1218288; cvc5 1.54142577; Bitwuzla 1.31216922; Bitwuzla 1.20787566; Yices2 1.16805827; cvc5 1.1310733; Bitwuzla 1.12967779; Yices2 1.04429852; Yices2 1.02665097; cvc5 1.00331716.

    Parallel Performance: cvc5 49432.0; cvc5 48.0; cvc5 23.90909091; Yices2 3.85400302; cvc5 2.27806368; smtinterpol 2.1218288; cvc5 1.45521024; Bitwuzla 1.31216922; Bitwuzla 1.20787566; Yices2 1.14781276; Bitwuzla 1.12967779; cvc5 1.1196782; Yices2 1.04429809; Yices2 1.02665097; Vampire 1.02475574.
diff --git a/archive/2022/results/bitvec-incremental.html b/archive/2022/results/bitvec-incremental.html
index 52c2677e..dddbce84 100644
--- a/archive/2022/results/bitvec-incremental.html
+++ b/archive/2022/results/bitvec-incremental.html

    Bitvec (Incremental Track)

    Competition results for the Bitvec division in the Incremental Track.

    Winner — Parallel Performance: cvc5.

    Correct answers (read back from the fused numeric columns): 2019-Z3n 37171; z3-4.8.17n 36646; cvc5 35832; Bitwuzla 33984; UltimateEliminator+MathSAT 18912.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/bitvec-proof-exhibition.html b/archive/2022/results/bitvec-proof-exhibition.html
index 569c70b6..cc971af9 100644
--- a/archive/2022/results/bitvec-proof-exhibition.html
+++ b/archive/2022/results/bitvec-proof-exhibition.html

    Bitvec (Proof Exhibition Track)

    Competition results for the Bitvec division in the Proof Exhibition Track.

    Ranking (solver, error score, correct score): cvc5-lfsc 0 2389; cvc5 0 2176.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/bitvec-single-query.html b/archive/2022/results/bitvec-single-query.html
index c2819a06..7110422c 100644
--- a/archive/2022/results/bitvec-single-query.html
+++ b/archive/2022/results/bitvec-single-query.html

    Bitvec (Single Query Track)

    Competition results for the Bitvec division in the Single Query Track.

    Winners — Sequential Performance: cvc5; Parallel Performance: cvc5; SAT Performance (parallel): cvc5; UNSAT Performance (parallel): cvc5; 24s Performance (parallel): Q3B.

    Ranking (solver, error score, correct score): 2019-Par4n 0 884; cvc5 0 854; Q3B 0 832; z3-4.8.17n 0 775; Bitwuzla 0 759; Q3B-pBDD 0 753; YicesQS 0 708; UltimateEliminator+MathSAT 0 304.

    [sequential/parallel, SAT, UNSAT, and 24s performance tables omitted: their CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/bitvec-unsat-core.html b/archive/2022/results/bitvec-unsat-core.html
index b3e8085d..4930aa0e 100644
--- a/archive/2022/results/bitvec-unsat-core.html
+++ b/archive/2022/results/bitvec-unsat-core.html

    Bitvec (Unsat Core Track)

    Competition results for the Bitvec division in the Unsat Core Track.

    Winners — Sequential Performance: cvc5; Parallel Performance: cvc5.

    Ranking (solver, error score, correct score): 2020-CVC4-ucn 0 61; cvc5 0 47; z3-4.8.17n 0 42; UltimateEliminator+MathSAT 0 0.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/bv-incremental.html b/archive/2022/results/bv-incremental.html
index 4a9f4322..87d0a067 100644
--- a/archive/2022/results/bv-incremental.html
+++ b/archive/2022/results/bv-incremental.html

    BV (Incremental Track)

    Competition results for the BV logic in the Incremental Track.

    Winner — Parallel Performance: cvc5.

    Correct answers (read back from the fused numeric columns): 2019-Z3n 37171; z3-4.8.17n 36646; cvc5 35832; Bitwuzla 33984; UltimateEliminator+MathSAT 18912.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
diff --git a/archive/2022/results/bv-proof-exhibition.html b/archive/2022/results/bv-proof-exhibition.html
index c1d2b308..c59ce8be 100644
--- a/archive/2022/results/bv-proof-exhibition.html
+++ b/archive/2022/results/bv-proof-exhibition.html

    BV (Proof Exhibition Track)

    Competition results for the BV logic in the Proof Exhibition Track.

    Ranking (solver, error score, correct score): cvc5-lfsc 0 2389; cvc5 0 2176.

    [per-solver timing tables omitted: CPU/wall-clock columns were fused when the HTML markup was stripped]
    - + - diff --git a/archive/2022/results/bv-single-query.html b/archive/2022/results/bv-single-query.html index 7121845f..1578d30f 100644 --- a/archive/2022/results/bv-single-query.html +++ b/archive/2022/results/bv-single-query.html @@ -35,7 +35,7 @@

diff --git a/archive/2022/results/bv-single-query.html b/archive/2022/results/bv-single-query.html
index 7121845f..1578d30f 100644
--- a/archive/2022/results/bv-single-query.html
+++ b/archive/2022/results/bv-single-query.html
[BV logic, Single Query Track. Same one-line formatting hunks. Winners: cvc5 (sequential, parallel, SAT, and UNSAT performance), Q3B (24s performance). Correct results: 2019-Par4n 884, cvc5 854, Q3B 832, z3-4.8.17n 775, Bitwuzla 759, Q3B-pBDD 753, YicesQS 708, UltimateEliminator+MathSAT 304.]
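The Single Query pages name a winner in five categories (sequential, parallel, SAT, UNSAT, and 24s performance), and entries carrying the non-competitive marker, rendered in this patch as a trailing "n" as in z3-4.8.17n or 2019-Par4n, are skipped when winners are picked. A sketch of that selection under those assumptions (the dict layout and the suffix test are illustrative, not the competition's actual code):

    CATEGORIES = ("sequential", "parallel", "sat", "unsat", "24s")

    def non_competitive(solver: str) -> bool:
        # The archive flags derived and previous-year entries; here we
        # only test the trailing marker as it appears in this patch.
        return solver.endswith("n")

    def winners(scores: dict[str, dict[str, int]]) -> dict[str, str]:
        # scores maps category -> {solver: score}; returns category -> winner.
        return {
            cat: max(
                (s for s in scores[cat] if not non_competitive(s)),
                key=scores[cat].get,
            )
            for cat in CATEGORIES
        }

On the BV page above this selection yields cvc5 for the first four categories and Q3B for 24s performance, which matches the winners row.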

diff --git a/archive/2022/results/bv-unsat-core.html b/archive/2022/results/bv-unsat-core.html
index 97435e93..fc16ca0e 100644
--- a/archive/2022/results/bv-unsat-core.html
+++ b/archive/2022/results/bv-unsat-core.html
[BV logic, Unsat Core Track. Same one-line formatting hunks. Winners: cvc5 (sequential and parallel). Scores: 2020-CVC4-ucn 61, cvc5 47, z3-4.8.17n 42, UltimateEliminator+MathSAT 0.]

diff --git a/archive/2022/results/bvfp-incremental.html b/archive/2022/results/bvfp-incremental.html
index ff4d13b0..03b7906f 100644
--- a/archive/2022/results/bvfp-incremental.html
+++ b/archive/2022/results/bvfp-incremental.html
[BVFP logic, Incremental Track. Same one-line formatting hunks. Winner (parallel performance): Bitwuzla. Table order: Bitwuzla, 2019-CVC4-incn, cvc5, UltimateEliminator+MathSAT.]

diff --git a/archive/2022/results/bvfp-proof-exhibition.html b/archive/2022/results/bvfp-proof-exhibition.html
index e8f225f2..8fe4a3d9 100644
--- a/archive/2022/results/bvfp-proof-exhibition.html
+++ b/archive/2022/results/bvfp-proof-exhibition.html
[BVFP logic, Proof Exhibition Track. Same one-line formatting hunks. Proofs found: cvc5 0, cvc5-lfsc 0.]

diff --git a/archive/2022/results/bvfp-single-query.html b/archive/2022/results/bvfp-single-query.html
index c3fa80a7..eb74d7a9 100644
--- a/archive/2022/results/bvfp-single-query.html
+++ b/archive/2022/results/bvfp-single-query.html
[BVFP logic, Single Query Track. Same one-line formatting hunks. Winners: cvc5 (sequential, parallel, and SAT performance), Bitwuzla (UNSAT and 24s performance). Correct results: z3-4.8.17n 177, cvc5 171, Bitwuzla 148, 2021-cvc5n 128, UltimateEliminator+MathSAT 25.]

diff --git a/archive/2022/results/bvfp-unsat-core.html b/archive/2022/results/bvfp-unsat-core.html
index 660824c9..a77b0340 100644
--- a/archive/2022/results/bvfp-unsat-core.html
+++ b/archive/2022/results/bvfp-unsat-core.html
[BVFP logic, Unsat Core Track. Same one-line formatting hunks. No winners declared. Scores: z3-4.8.17n 1; cvc5, 2020-CVC4-ucn, and UltimateEliminator+MathSAT 0.]

diff --git a/archive/2022/results/bvfplra-incremental.html b/archive/2022/results/bvfplra-incremental.html
index 619166ae..23254845 100644
--- a/archive/2022/results/bvfplra-incremental.html
+++ b/archive/2022/results/bvfplra-incremental.html
[BVFPLRA logic, Incremental Track. Same one-line formatting hunks. Winner (parallel performance): Bitwuzla. Table order: Bitwuzla, cvc5, UltimateEliminator+MathSAT.]

diff --git a/archive/2022/results/bvfplra-proof-exhibition.html b/archive/2022/results/bvfplra-proof-exhibition.html
index 264f60d9..6db268a3 100644
--- a/archive/2022/results/bvfplra-proof-exhibition.html
+++ b/archive/2022/results/bvfplra-proof-exhibition.html
[BVFPLRA logic, Proof Exhibition Track. Same one-line formatting hunks. Proofs found: cvc5 0, cvc5-lfsc 0.]

diff --git a/archive/2022/results/bvfplra-single-query.html b/archive/2022/results/bvfplra-single-query.html
index c4c3a993..8df4f8b2 100644
--- a/archive/2022/results/bvfplra-single-query.html
+++ b/archive/2022/results/bvfplra-single-query.html
[BVFPLRA logic, Single Query Track. Same one-line formatting hunks. Winners: Bitwuzla (sequential, parallel, SAT, and 24s performance), UltimateEliminator+MathSAT (UNSAT performance). Correct results: z3-4.8.17n 193, Bitwuzla 141, 2021-cvc5n 104, cvc5 104, UltimateEliminator+MathSAT 46.]

diff --git a/archive/2022/results/bvfplra-unsat-core.html b/archive/2022/results/bvfplra-unsat-core.html
index 3798b8a8..fd6cc121 100644
--- a/archive/2022/results/bvfplra-unsat-core.html
+++ b/archive/2022/results/bvfplra-unsat-core.html
[BVFPLRA logic, Unsat Core Track. Same one-line formatting hunks. No winners declared. Scores: 2020-CVC4-ucn 50, z3-4.8.17n 49, UltimateEliminator+MathSAT 0, cvc5 0.]

diff --git a/archive/2022/results/equality-cloud.html b/archive/2022/results/equality-cloud.html
index ccdafe48..854b8fbe 100644
--- a/archive/2022/results/equality-cloud.html
+++ b/archive/2022/results/equality-cloud.html
[Equality division, Cloud Track. Same one-line formatting hunks. Table order: Vampire (error score 0), cvc5-cloud (error score 6).]

diff --git a/archive/2022/results/equality-incremental.html b/archive/2022/results/equality-incremental.html
index 4c143761..384926a4 100644
--- a/archive/2022/results/equality-incremental.html
+++ b/archive/2022/results/equality-incremental.html
[Equality division, Incremental Track. Same one-line formatting hunks. Winner (parallel performance): cvc5. Table order: 2020-z3n, z3-4.8.17n, cvc5, smtinterpol, UltimateEliminator+MathSAT.]

diff --git a/archive/2022/results/equality-lineararith-cloud.html b/archive/2022/results/equality-lineararith-cloud.html
index 47f8b08c..27921446 100644
--- a/archive/2022/results/equality-lineararith-cloud.html
+++ b/archive/2022/results/equality-lineararith-cloud.html
[Equality+LinearArith division, Cloud Track. Same one-line formatting hunks. Table order: Vampire (error score 2), cvc5-cloud (error score 3).]

diff --git a/archive/2022/results/equality-lineararith-incremental.html b/archive/2022/results/equality-lineararith-incremental.html
index ea463d79..4f313094 100644
--- a/archive/2022/results/equality-lineararith-incremental.html
+++ b/archive/2022/results/equality-lineararith-incremental.html
[Equality+LinearArith division, Incremental Track. Same one-line formatting hunks. Winner (parallel performance): cvc5. Table order: 2021-z3n, z3-4.8.17n, cvc5, smtinterpol, UltimateEliminator+MathSAT.]

diff --git a/archive/2022/results/equality-lineararith-parallel.html b/archive/2022/results/equality-lineararith-parallel.html
index d695801f..c56e59e6 100644
--- a/archive/2022/results/equality-lineararith-parallel.html
+++ b/archive/2022/results/equality-lineararith-parallel.html
[Equality+LinearArith division, Parallel Track. Same one-line formatting hunks. Single entrant: Vampire (error score 3).]

diff --git a/archive/2022/results/equality-lineararith-proof-exhibition.html b/archive/2022/results/equality-lineararith-proof-exhibition.html
index bcbd4089..b3eac5ab 100644
--- a/archive/2022/results/equality-lineararith-proof-exhibition.html
+++ b/archive/2022/results/equality-lineararith-proof-exhibition.html
[Equality+LinearArith division, Proof Exhibition Track. Same one-line formatting hunks. Proofs found: cvc5-lfsc 22802, cvc5 21092, smtinterpol 20564, veriT 14668.]

diff --git a/archive/2022/results/equality-lineararith-single-query.html b/archive/2022/results/equality-lineararith-single-query.html
index 3b19745e..965c123c 100644
--- a/archive/2022/results/equality-lineararith-single-query.html
+++ b/archive/2022/results/equality-lineararith-single-query.html
[Equality+LinearArith division, Single Query Track. Same one-line formatting hunks. Winner of all five performance categories: cvc5. Correct results: cvc5 11328, z3-4.8.17n 10834, 2020-CVC4n 10801, Vampire 9979, veriT 4133, UltimateEliminator+MathSAT 62; smtinterpol 8848 and smtinterpol-fixedn 5614, each with error score 1.]

diff --git a/archive/2022/results/equality-lineararith-unsat-core.html b/archive/2022/results/equality-lineararith-unsat-core.html
index 7bb5fa99..b5bf66ad 100644
--- a/archive/2022/results/equality-lineararith-unsat-core.html
+++ b/archive/2022/results/equality-lineararith-unsat-core.html
[Equality+LinearArith division, Unsat Core Track. Same one-line formatting hunks. Winners: cvc5 (sequential and parallel). Scores: 2021-cvc5-ucn 1480936, z3-4.8.17n 1428341, cvc5 1336111, Vampire 1181277, smtinterpol 1093221, UltimateEliminator+MathSAT 9052 (error score 3).]

diff --git a/archive/2022/results/equality-machinearith-incremental.html b/archive/2022/results/equality-machinearith-incremental.html
index 2f1a99a8..2f4cb312 100644
--- a/archive/2022/results/equality-machinearith-incremental.html
+++ b/archive/2022/results/equality-machinearith-incremental.html
[Equality+MachineArith division, Incremental Track. Same one-line formatting hunks. Winner (parallel performance): UltimateEliminator+MathSAT. Table order: UltimateEliminator+MathSAT, cvc5, Bitwuzla.]

diff --git a/archive/2022/results/equality-machinearith-proof-exhibition.html b/archive/2022/results/equality-machinearith-proof-exhibition.html
index bbee38ab..d11507b8 100644
--- a/archive/2022/results/equality-machinearith-proof-exhibition.html
+++ b/archive/2022/results/equality-machinearith-proof-exhibition.html
[Equality+MachineArith division, Proof Exhibition Track. Same one-line formatting hunks. Proofs found: cvc5-lfsc 655, cvc5 461.]

diff --git a/archive/2022/results/equality-machinearith-single-query.html b/archive/2022/results/equality-machinearith-single-query.html
index 6a820ed5..c82186c6 100644
--- a/archive/2022/results/equality-machinearith-single-query.html
+++ b/archive/2022/results/equality-machinearith-single-query.html
[Equality+MachineArith division, Single Query Track. Same one-line formatting hunks. Winner of all five performance categories: cvc5. Correct results: z3-4.8.17n 1460, cvc5 1385, 2021-cvc5n 994, Bitwuzla 424, UltimateEliminator+MathSAT 34.]

diff --git a/archive/2022/results/equality-machinearith-unsat-core.html b/archive/2022/results/equality-machinearith-unsat-core.html
index 5cfe22fe..ecaf836b 100644
--- a/archive/2022/results/equality-machinearith-unsat-core.html
+++ b/archive/2022/results/equality-machinearith-unsat-core.html
[Equality+MachineArith division, Unsat Core Track. Same one-line formatting hunks. Winners: cvc5 (sequential and parallel). Scores: z3-4.8.17n 88967, cvc5 49431, 2021-cvc5-ucn 6353, UltimateEliminator+MathSAT 0.]

diff --git a/archive/2022/results/equality-nonlineararith-cloud.html b/archive/2022/results/equality-nonlineararith-cloud.html
index 2812477b..6434ac15 100644
--- a/archive/2022/results/equality-nonlineararith-cloud.html
+++ b/archive/2022/results/equality-nonlineararith-cloud.html
[Equality+NonLinearArith division, Cloud Track. Same one-line formatting hunks. Table order: Vampire (error score 0), cvc5-cloud (error score 2).]

diff --git a/archive/2022/results/equality-nonlineararith-incremental.html b/archive/2022/results/equality-nonlineararith-incremental.html
index c0a2c565..5d8d6126 100644
--- a/archive/2022/results/equality-nonlineararith-incremental.html
+++ b/archive/2022/results/equality-nonlineararith-incremental.html
[Equality+NonLinearArith division, Incremental Track. Same one-line formatting hunks. Winner (parallel performance): cvc5. Table order: z3-4.8.17n, 2020-z3n, cvc5, smtinterpol, UltimateEliminator+MathSAT.]

diff --git a/archive/2022/results/equality-nonlineararith-parallel.html b/archive/2022/results/equality-nonlineararith-parallel.html
index f1ffbe5e..d5127bdd 100644
--- a/archive/2022/results/equality-nonlineararith-parallel.html
+++ b/archive/2022/results/equality-nonlineararith-parallel.html
[Equality+NonLinearArith division, Parallel Track. Same one-line formatting hunks. Single entrant: Vampire (error score 0).]

diff --git a/archive/2022/results/equality-nonlineararith-proof-exhibition.html b/archive/2022/results/equality-nonlineararith-proof-exhibition.html
index 8356b01a..7c644622 100644
--- a/archive/2022/results/equality-nonlineararith-proof-exhibition.html
+++ b/archive/2022/results/equality-nonlineararith-proof-exhibition.html
[Equality+NonLinearArith division, Proof Exhibition Track. Same one-line formatting hunks. Proofs found: cvc5-lfsc 6056, cvc5 4772.]

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    Equality+NonLinearArith (Single Query Track)

    Competition results for the Equality+NonLinearArith - + division - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    Equality+NonLinearArith (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) cvc5cvc5cvc5 - - + + cvc5 - - + + cvc5 - + @@ -131,7 +131,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + cvc5 0 6135 @@ -142,7 +142,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + 2020-CVC4n 0 6055 @@ -153,7 +153,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + z3-4.8.17n 0 5327 @@ -164,7 +164,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + Vampire 0 4804 @@ -175,7 +175,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + UltimateEliminator+MathSAT 0 568 @@ -197,7 +197,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + cvc5 0 61364009250.8464011556.09361366945442335703226 @@ -206,7 +206,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + 2020-CVC4n 0 60553827621.7593833071.7160556925363343803064 @@ -215,7 +215,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + z3-4.8.17n 0 53273478506.4923482130.41953276274700416602486 @@ -224,7 +224,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + Vampire 0 51247107011.6385492638.325512405124436904310 @@ -233,7 +233,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + UltimateEliminator+MathSAT 0 568649288.534630177.03956839717189250480 @@ -253,7 +253,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + cvc5 0 69459103.69659205.11869469404687533226 @@ -262,7 +262,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + 2020-CVC4n 0 69247700.6448095.22469269204887533064 @@ -271,7 +271,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + z3-4.8.17n 0 627102098.653102088.04627627011387532486 @@ -280,7 +280,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + UltimateEliminator+MathSAT 0 397328504.159327522.33439739703438753480 @@ -289,7 +289,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + Vampire 0 0992404.87887899.4300074087534310 @@ -309,7 +309,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + cvc5 0 5442586128.77588333.52854420544241836333226 @@ -318,7 +318,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + 2020-CVC4n 0 5363688713.053693696.32953630536349736333064 @@ -327,7 +327,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + Vampire 0 51242224514.1681134378.35751240512473636334310 @@ -336,7 +336,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + z3-4.8.17n 0 4700874481.81874462.811470004700116036332486 @@ -345,7 +345,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + UltimateEliminator+MathSAT 0 171194602.027181883.204171017156893633480 @@ -365,7 +365,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + cvc5 0 539699417.41499378.22953966254771409703973 @@ -374,7 +374,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + 2020-CVC4n 0 536895000.55794980.11753686014767412503827 @@ -383,7 +383,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + z3-4.8.17n 0 516299359.60599251.151626054557433103878 @@ -392,7 +392,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + Vampire 0 3429160477.643149260.373342903429606406034 @@ -401,7 +401,7 @@

    Equality+NonLinearArith (Single Query Track)

    - + UltimateEliminator+MathSAT 0 53458824.46340920.70253436417089590576 @@ -425,7 +425,6 @@

    Equality+NonLinearArith (Single Query Track)

diff --git a/archive/2022/results/equality-nonlineararith-unsat-core.html b/archive/2022/results/equality-nonlineararith-unsat-core.html
index af902e33..a18ab74a 100644
--- a/archive/2022/results/equality-nonlineararith-unsat-core.html
+++ b/archive/2022/results/equality-nonlineararith-unsat-core.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the Equality+NonLinearArith division in the Unsat Core Track."; winner cvc5 (Sequential and Parallel); ranking: 2020-CVC4-ucn 225070, cvc5 222083, z3-4.8.17n 197335, Vampire 144076, UltimateEliminator+MathSAT 1489.]
diff --git a/archive/2022/results/equality-parallel.html b/archive/2022/results/equality-parallel.html
index 7f28aa94..88020e7d 100644
--- a/archive/2022/results/equality-parallel.html
+++ b/archive/2022/results/equality-parallel.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the Equality division in the Parallel Track."; Vampire is the only solver listed across the performance tables.]
diff --git a/archive/2022/results/equality-proof-exhibition.html b/archive/2022/results/equality-proof-exhibition.html
index 5ab82610..27f54a68 100644
--- a/archive/2022/results/equality-proof-exhibition.html
+++ b/archive/2022/results/equality-proof-exhibition.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the Equality division in the Proof Exhibition Track."; ranking: cvc5-lfsc 2653, cvc5 2616, smtinterpol 1632, veriT 1594.]
diff --git a/archive/2022/results/equality-single-query.html b/archive/2022/results/equality-single-query.html
index 46c6085f..27f4fa7e 100644
--- a/archive/2022/results/equality-single-query.html
+++ b/archive/2022/results/equality-single-query.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the Equality division in the Single Query Track."; winners: cvc5 (Sequential, Parallel, SAT, UNSAT) and Vampire (24s); sequential ranking: cvc5 1689, 2020-CVC4n 1685, Vampire 1531, z3-4.8.17n 719, veriT 669, Yices2 345, smtinterpol 296, UltimateEliminator+MathSAT 0.]
diff --git a/archive/2022/results/equality-unsat-core.html b/archive/2022/results/equality-unsat-core.html
index f6a76cb4..9afbeefc 100644
--- a/archive/2022/results/equality-unsat-core.html
+++ b/archive/2022/results/equality-unsat-core.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the Equality division in the Unsat Core Track."; winners: cvc5 (Sequential) and Vampire (Parallel); ranking: 2020-CVC4-ucn 825656, cvc5 823606, Vampire 820883, z3-4.8.17n 711172, smtinterpol 495549, UltimateEliminator+MathSAT 1.]
diff --git a/archive/2022/results/fp-proof-exhibition.html b/archive/2022/results/fp-proof-exhibition.html
index 8db2ad38..7e9105d2 100644
--- a/archive/2022/results/fp-proof-exhibition.html
+++ b/archive/2022/results/fp-proof-exhibition.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the FP logic in the Proof Exhibition Track."; ranking: cvc5-lfsc 287, cvc5 262.]
diff --git a/archive/2022/results/fp-single-query.html b/archive/2022/results/fp-single-query.html
index 4d0e4e14..c7f660d9 100644
--- a/archive/2022/results/fp-single-query.html
+++ b/archive/2022/results/fp-single-query.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the FP logic in the Single Query Track."; winner Bitwuzla in all five categories; sequential ranking: Bitwuzla 1273, cvc5 1193, 2021-cvc5n 1192, z3-4.8.17n 1129, UltimateEliminator+MathSAT 170.]
diff --git a/archive/2022/results/fparith-incremental.html b/archive/2022/results/fparith-incremental.html
index 45bbdfdb..ade7ef19 100644
--- a/archive/2022/results/fparith-incremental.html
+++ b/archive/2022/results/fparith-incremental.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the FPArith division in the Incremental Track."; winner Bitwuzla (Parallel Performance); ranking order: Bitwuzla, cvc5, UltimateEliminator+MathSAT, 2019-CVC4-incn (scores and timings fused in extraction).]
diff --git a/archive/2022/results/fparith-proof-exhibition.html b/archive/2022/results/fparith-proof-exhibition.html
index 4518b2b6..dff79841 100644
--- a/archive/2022/results/fparith-proof-exhibition.html
+++ b/archive/2022/results/fparith-proof-exhibition.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the FPArith division in the Proof Exhibition Track."; ranking: cvc5-lfsc 289, cvc5 264.]
diff --git a/archive/2022/results/fparith-single-query.html b/archive/2022/results/fparith-single-query.html
index b7763360..6ee976f8 100644
--- a/archive/2022/results/fparith-single-query.html
+++ b/archive/2022/results/fparith-single-query.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the FPArith division in the Single Query Track."; winner Bitwuzla in all five categories; sequential ranking: Bitwuzla 1620, z3-4.8.17n 1569, cvc5 1518, 2021-cvc5n 1474, UltimateEliminator+MathSAT 277.]
diff --git a/archive/2022/results/fparith-unsat-core.html b/archive/2022/results/fparith-unsat-core.html
index 3a463006..549687f9 100644
--- a/archive/2022/results/fparith-unsat-core.html
+++ b/archive/2022/results/fparith-unsat-core.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the FPArith division in the Unsat Core Track."; no winner declared (Sequential and Parallel both "—"); ranking: 2020-CVC4-ucn 50, z3-4.8.17n 50, UltimateEliminator+MathSAT 0, cvc5 0.]
diff --git a/archive/2022/results/fplra-proof-exhibition.html b/archive/2022/results/fplra-proof-exhibition.html
index 36fb91ea..53bdde96 100644
--- a/archive/2022/results/fplra-proof-exhibition.html
+++ b/archive/2022/results/fplra-proof-exhibition.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the FPLRA logic in the Proof Exhibition Track."; ranking: cvc5 2, cvc5-lfsc 2.]
diff --git a/archive/2022/results/fplra-single-query.html b/archive/2022/results/fplra-single-query.html
index 7b8df746..8143c6ea 100644
--- a/archive/2022/results/fplra-single-query.html
+++ b/archive/2022/results/fplra-single-query.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the FPLRA logic in the Single Query Track."; winner Bitwuzla in all five categories; sequential ranking: z3-4.8.17n 70, Bitwuzla 58, cvc5 50, 2021-cvc5n 50, UltimateEliminator+MathSAT 36.]
diff --git a/archive/2022/results/fplra-unsat-core.html b/archive/2022/results/fplra-unsat-core.html
index 18b21517..b8d1dfc4 100644
--- a/archive/2022/results/fplra-unsat-core.html
+++ b/archive/2022/results/fplra-unsat-core.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the FPLRA logic in the Unsat Core Track."; no winner declared (Sequential and Parallel both "—"); all four solvers (2020-CVC4-ucn, z3-4.8.17n, cvc5, UltimateEliminator+MathSAT) score 0.]
diff --git a/archive/2022/results/largest-contribution-incremental.html b/archive/2022/results/largest-contribution-incremental.html
index a9b51355..5ba0a9ec 100644
--- a/archive/2022/results/largest-contribution-incremental.html
+++ b/archive/2022/results/largest-contribution-incremental.html
[same markup-only hunk pattern. Recoverable context: largest-contribution results for the Incremental Track; winner cvc5 (Parallel Performance); fifteen per-division contribution scores, led by cvc5 0.06964238, cvc5 0.06308624, and Yices2 0.0342871, with further entries for Yices2, smtinterpol, cvc5, and Bitwuzla down to 0.0.]
diff --git a/archive/2022/results/largest-contribution-model-validation.html b/archive/2022/results/largest-contribution-model-validation.html
index 00cba1f8..9aec00d7 100644
--- a/archive/2022/results/largest-contribution-model-validation.html
+++ b/archive/2022/results/largest-contribution-model-validation.html
[same markup-only hunk pattern. Recoverable context: largest-contribution results for the Model Validation Track; winner Z3++ (Sequential and Parallel); identical score lists in both categories: Z3++ 0.00325164, Bitwuzla 0.00154221, smtinterpol 0.00139835, OpenSMT 0.000324, Bitwuzla 8.022e-05, Yices2 0.0.]
diff --git a/archive/2022/results/largest-contribution-single-query.html b/archive/2022/results/largest-contribution-single-query.html
index 19a8f31a..ef6b6d6c 100644
--- a/archive/2022/results/largest-contribution-single-query.html
+++ b/archive/2022/results/largest-contribution-single-query.html
[same markup-only hunk pattern. Recoverable context: largest-contribution results for the Single Query Track; winner cvc5 in all five categories (Sequential, Parallel, SAT, UNSAT, 24 seconds); top scores per category: Sequential cvc5 0.02293044, Parallel cvc5 0.02293044, SAT cvc5 0.1685148, UNSAT cvc5 0.01876725, 24s cvc5 0.02410528; further per-division entries for YicesQS, Bitwuzla, Z3++, SMT-RAT-MCSAT 22.06, COLIBRI, Yices2, Vampire, OSTRICH, smtinterpol, OpenSMT, and veriT.]
diff --git a/archive/2022/results/largest-contribution-unsat-core.html b/archive/2022/results/largest-contribution-unsat-core.html
index ceee461e..93a15efb 100644
--- a/archive/2022/results/largest-contribution-unsat-core.html
+++ b/archive/2022/results/largest-contribution-unsat-core.html
[same markup-only hunk pattern. Recoverable context: largest-contribution results for the Unsat Core Track; winner cvc5 (Sequential and Parallel); top scores: Sequential cvc5 0.0521587, Parallel cvc5 0.04957972; further entries for Bitwuzla, Yices2, and Vampire.]
diff --git a/archive/2022/results/lia-incremental.html b/archive/2022/results/lia-incremental.html
index 29cfb1f3..93f2a4f5 100644
--- a/archive/2022/results/lia-incremental.html
+++ b/archive/2022/results/lia-incremental.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the LIA logic in the Incremental Track."; winner cvc5 (Parallel Performance); ranking order: z3-4.8.17n, 2021-cvc5-incn, cvc5, UltimateEliminator+MathSAT, smtinterpol (scores and timings fused in extraction).]
diff --git a/archive/2022/results/lia-proof-exhibition.html b/archive/2022/results/lia-proof-exhibition.html
index 58220e0d..fb684ba3 100644
--- a/archive/2022/results/lia-proof-exhibition.html
+++ b/archive/2022/results/lia-proof-exhibition.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the LIA logic in the Proof Exhibition Track."; ranking: cvc5-lfsc 263, cvc5 253, smtinterpol 179, veriT 169.]
diff --git a/archive/2022/results/lia-single-query.html b/archive/2022/results/lia-single-query.html
index bd2d321c..8647aba8 100644
--- a/archive/2022/results/lia-single-query.html
+++ b/archive/2022/results/lia-single-query.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the LIA logic in the Single Query Track."; winner cvc5 in all five categories; sequential ranking: z3-4.8.17n 300, cvc5 300, 2021-z3n 292, UltimateEliminator+MathSAT 228, YicesQS 182, Vampire 157, smtinterpol 97, veriT 75.]
diff --git a/archive/2022/results/lia-unsat-core.html b/archive/2022/results/lia-unsat-core.html
index aea42206..047a577a 100644
--- a/archive/2022/results/lia-unsat-core.html
+++ b/archive/2022/results/lia-unsat-core.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the LIA logic in the Unsat Core Track."; winner cvc5 (Sequential and Parallel); ranking: 2020-CVC4-ucn 8, z3-4.8.17n 8, cvc5 7, Vampire 2, smtinterpol 0, UltimateEliminator+MathSAT 6 (with 1 error).]
diff --git a/archive/2022/results/lra-cloud.html b/archive/2022/results/lra-cloud.html
index 201b34b9..6c8fd92d 100644
--- a/archive/2022/results/lra-cloud.html
+++ b/archive/2022/results/lra-cloud.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the LRA logic in the Cloud Track."; two entrants: Vampire (13 solved, 8 errors) ahead of cvc5-cloud (0 solved, 20 errors).]
diff --git a/archive/2022/results/lra-incremental.html b/archive/2022/results/lra-incremental.html
index 4babb25a..98c61e88 100644
--- a/archive/2022/results/lra-incremental.html
+++ b/archive/2022/results/lra-incremental.html
[same markup-only hunk pattern. Recoverable context: "Competition results for the LRA logic in the Incremental Track."; winner cvc5 (Parallel Performance); ranking order: 2021-cvc5-incn, cvc5, UltimateEliminator+MathSAT, z3-4.8.17n, smtinterpol (scores and timings fused in extraction).]
diff --git a/archive/2022/results/lra-parallel.html b/archive/2022/results/lra-parallel.html
index 4b360ce6..51409cf0 100644

LRA (Parallel Track)

Competition results for the LRA logic in the Parallel Track.
Participant: Vampire.
diff --git a/archive/2022/results/lra-proof-exhibition.html b/archive/2022/results/lra-proof-exhibition.html
index 88b1aab2..2a32e63f 100644

LRA (Proof Exhibition Track)

Competition results for the LRA logic in the Proof Exhibition Track.
Scores: cvc5-lfsc (0, 666); cvc5 (0, 664); smtinterpol (0, 298).
diff --git a/archive/2022/results/lra-single-query.html b/archive/2022/results/lra-single-query.html
index 5466ccd8..093f08a3 100644

LRA (Single Query Track)

Competition results for the LRA logic in the Single Query Track.
Winner in all categories (Sequential, Parallel, SAT, UNSAT, 24s): YicesQS.
Scores: YicesQS (0, 1003); 2021-z3n (0, 948); z3-4.8.17n (0, 936); UltimateEliminator+MathSAT (0, 847); cvc5 (0, 834); Vampire (0, 484); smtinterpol (0, 164).
diff --git a/archive/2022/results/nia-proof-exhibition.html b/archive/2022/results/nia-proof-exhibition.html
index 7e48fe32..569f19e7 100644

NIA (Proof Exhibition Track)

Competition results for the NIA logic in the Proof Exhibition Track.
Scores: cvc5-lfsc (0, 123); cvc5 (0, 117).
diff --git a/archive/2022/results/nia-single-query.html b/archive/2022/results/nia-single-query.html
index d7840868..111f1087 100644

NIA (Single Query Track)

Competition results for the NIA logic in the Single Query Track.
Winner in all categories (Sequential, Parallel, SAT, UNSAT, 24s): cvc5.
Scores: cvc5 (0, 190); UltimateEliminator+MathSAT (0, 129); z3-4.8.17n (0, 88); 2021-z3n (0, 87); YicesQS (0, 80); Vampire (0, 63).
diff --git a/archive/2022/results/nia-unsat-core.html b/archive/2022/results/nia-unsat-core.html
index ede8a812..68810b37 100644

NIA (Unsat Core Track)

Competition results for the NIA logic in the Unsat Core Track.
Winners: cvc5 (Sequential Performance), cvc5 (Parallel Performance).
Scores: cvc5 (0, 255); 2020-CVC4-ucn (0, 212); z3-4.8.17n (0, 61); Vampire (0, 8); UltimateEliminator+MathSAT (20, 190).
diff --git a/archive/2022/results/nra-cloud.html b/archive/2022/results/nra-cloud.html
index e8fbd854..886f19d5 100644

NRA (Cloud Track)

Competition results for the NRA logic in the Cloud Track.
Participants: cvc5-cloud, Vampire.
diff --git a/archive/2022/results/nra-parallel.html b/archive/2022/results/nra-parallel.html
index e0aeec75..a7269b07 100644

NRA (Parallel Track)

Competition results for the NRA logic in the Parallel Track.
Participant: Vampire.
diff --git a/archive/2022/results/nra-proof-exhibition.html b/archive/2022/results/nra-proof-exhibition.html
index ca60c723..ed70e86f 100644

NRA (Proof Exhibition Track)

Competition results for the NRA logic in the Proof Exhibition Track.
Scores: cvc5-lfsc (0, 1892); cvc5 (0, 1892).
diff --git a/archive/2022/results/nra-single-query.html b/archive/2022/results/nra-single-query.html
index d02f782a..bf880d62 100644

NRA (Single Query Track)

Competition results for the NRA logic in the Single Query Track.
Winner in all categories (Sequential, Parallel, SAT, UNSAT, 24s): YicesQS.
Scores: 2021-z3n (0, 94); YicesQS (0, 94); z3-4.8.17n (0, 90); cvc5 (0, 86); Vampire (0, 83); UltimateEliminator+MathSAT (0, 6).
diff --git a/archive/2022/results/qf-abv-incremental.html b/archive/2022/results/qf-abv-incremental.html
index 7d69a95d..4fb5c6df 100644

QF_ABV (Incremental Track)

Competition results for the QF_ABV logic in the Incremental Track.
Winner: Yices2 (Parallel Performance).
Participants: Yices2, 2020-Yices2 incrementaln, Bitwuzla, MathSATn, cvc5, z3-4.8.17n.
diff --git a/archive/2022/results/qf-abv-proof-exhibition.html b/archive/2022/results/qf-abv-proof-exhibition.html
index 23897259..54761bae 100644

QF_ABV (Proof Exhibition Track)

Competition results for the QF_ABV logic in the Proof Exhibition Track.
Scores: cvc5-lfsc (0, 2127); cvc5 (0, 1609).
diff --git a/archive/2022/results/qf-abv-single-query.html b/archive/2022/results/qf-abv-single-query.html
index 825844e3..22ee1df2 100644

QF_ABV (Single Query Track)

Competition results for the QF_ABV logic in the Single Query Track.
Winner in all categories (Sequential, Parallel, SAT, UNSAT, 24s): Bitwuzla.
Scores: Bitwuzla (0, 1804); 2020-Bitwuzlan (0, 1804); Yices2 (0, 1792); MathSATn (0, 1749); z3-4.8.17n (0, 1730); cvc5 (0, 1728).
diff --git a/archive/2022/results/qf-abv-unsat-core.html b/archive/2022/results/qf-abv-unsat-core.html
index cfd52c56..cdafccd0 100644

QF_ABV (Unsat Core Track)

Competition results for the QF_ABV logic in the Unsat Core Track.
Winners: Bitwuzla (Sequential Performance), Bitwuzla (Parallel Performance).
Scores: Bitwuzla (0, 190264); 2021-Bitwuzlan (0, 187186); Yices2 (0, 180360); z3-4.8.17n (0, 172030); cvc5 (0, 127243); MathSATn (0, 73).
diff --git a/archive/2022/results/qf-abvfp-incremental.html b/archive/2022/results/qf-abvfp-incremental.html
index ed9d4f2d..70ae3496 100644

QF_ABVFP (Incremental Track)

Competition results for the QF_ABVFP logic in the Incremental Track.
Winner: Bitwuzla (Parallel Performance).
Participants: Bitwuzla, 2021-Bitwuzla - fixedn, MathSATn, cvc5.
diff --git a/archive/2022/results/qf-abvfp-proof-exhibition.html b/archive/2022/results/qf-abvfp-proof-exhibition.html
index aecc6ffd..b1d38e8d 100644

QF_ABVFP (Proof Exhibition Track)

Competition results for the QF_ABVFP logic in the Proof Exhibition Track.
Scores: cvc5-lfsc (0, 327); cvc5 (0, 295).
diff --git a/archive/2022/results/qf-abvfp-single-query.html b/archive/2022/results/qf-abvfp-single-query.html
index b4585c68..d896e7e6 100644

QF_ABVFP (Single Query Track)

Competition results for the QF_ABVFP logic in the Single Query Track.
Winner in all categories (Sequential, Parallel, SAT, UNSAT, 24s): Bitwuzla.
Scores: Bitwuzla (0, 600); MathSATn (0, 595); 2021-cvc5n (0, 593); cvc5 (0, 591); z3-4.8.17n (0, 555); COLIBRI (0, 495).
diff --git a/archive/2022/results/qf-abvfp-unsat-core.html b/archive/2022/results/qf-abvfp-unsat-core.html
index 0241b090..9ba800e1 100644

QF_ABVFP (Unsat Core Track)

Competition results for the QF_ABVFP logic in the Unsat Core Track.
Winners: Bitwuzla (Sequential Performance), Bitwuzla (Parallel Performance).
Scores: Bitwuzla (0, 16561); 2021-Bitwuzlan (0, 16441); z3-4.8.17n (0, 16046); cvc5 (0, 14095); MathSATn (0, 624).
diff --git a/archive/2022/results/qf-abvfplra-incremental.html b/archive/2022/results/qf-abvfplra-incremental.html
index 5cbe484c..b91289bf 100644

QF_ABVFPLRA (Incremental Track)

Competition results for the QF_ABVFPLRA logic in the Incremental Track.
Winner: cvc5 (Parallel Performance).
Participants: MathSATn, cvc5, Bitwuzla.
diff --git a/archive/2022/results/qf-abvfplra-proof-exhibition.html b/archive/2022/results/qf-abvfplra-proof-exhibition.html
index 32202cf3..e0864aba 100644

QF_ABVFPLRA (Proof Exhibition Track)

Competition results for the QF_ABVFPLRA logic in the Proof Exhibition Track.
Scores: cvc5-lfsc (0, 0); cvc5 (0, 0).
diff --git a/archive/2022/results/qf-abvfplra-single-query.html b/archive/2022/results/qf-abvfplra-single-query.html
index f98b4433..6a11f22c 100644

QF_ABVFPLRA (Single Query Track)

Competition results for the QF_ABVFPLRA logic in the Single Query Track.
Winners: cvc5 (Sequential, Parallel, UNSAT Performance); COLIBRI (SAT, 24s Performance).
Scores: cvc5 (0, 25); 2021-cvc5n (0, 25); COLIBRI (0, 24); z3-4.8.17n (0, 19); MathSATn (0, 14); Bitwuzla (0, 9).
diff --git a/archive/2022/results/qf-abvfplra-unsat-core.html b/archive/2022/results/qf-abvfplra-unsat-core.html
index da370311..c30b97e1 100644

QF_ABVFPLRA (Unsat Core Track)

Competition results for the QF_ABVFPLRA logic in the Unsat Core Track.
Winners: cvc5 (Sequential Performance), cvc5 (Parallel Performance).
Scores: cvc5 (0, 1933); z3-4.8.17n (0, 1269); 2021-Bitwuzlan (0, 0); Bitwuzla (0, 0); MathSATn (0, 0).
diff --git a/archive/2022/results/qf-alia-incremental.html b/archive/2022/results/qf-alia-incremental.html
index 134b4570..b83acb69 100644

QF_ALIA (Incremental Track)

Competition results for the QF_ALIA logic in the Incremental Track.
Winner: smtinterpol (Parallel Performance).
Participants: 2020-z3n, z3-4.8.17n, smtinterpol, Yices2, cvc5, MathSATn.
diff --git a/archive/2022/results/qf-alia-proof-exhibition.html b/archive/2022/results/qf-alia-proof-exhibition.html
index 8c904e17..b36aba04 100644

QF_ALIA (Proof Exhibition Track)

Competition results for the QF_ALIA logic in the Proof Exhibition Track.
Scores: smtinterpol (0, 72); cvc5-lfsc (0, 69); veriT (0, 16); cvc5 (0, 15).
diff --git a/archive/2022/results/qf-alia-single-query.html b/archive/2022/results/qf-alia-single-query.html
index 2129fc35..3857546e 100644

QF_ALIA (Single Query Track)

Competition results for the QF_ALIA logic in the Single Query Track.
Winner in all categories (Sequential, Parallel, SAT, UNSAT, 24s): Yices2.
Scores: Yices2 (0, 116); MathSATn (0, 116); 2021-SMTInterpoln (0, 116); smtinterpol (0, 116); z3-4.8.17n (0, 115); cvc5 (0, 94); veriT (0, 6).
diff --git a/archive/2022/results/qf-alia-unsat-core.html b/archive/2022/results/qf-alia-unsat-core.html
index 7d5415d9..732ce4ba 100644

QF_ALIA (Unsat Core Track)

Competition results for the QF_ALIA logic in the Unsat Core Track.
Winners: smtinterpol (Sequential Performance), smtinterpol (Parallel Performance).
Scores: z3-4.8.17n (0, 654); smtinterpol (0, 633); Yices2 (0, 594); MathSATn (0, 553); 2021-MathSAT5n (0, 553); cvc5 (0, 0).
diff --git a/archive/2022/results/qf-ania-incremental.html b/archive/2022/results/qf-ania-incremental.html
index 5d8821e1..5056c13d 100644

QF_ANIA (Incremental Track)

Competition results for the QF_ANIA logic in the Incremental Track.
Winner: smtinterpol (Parallel Performance).
Participants: 2021-z3n, z3-4.8.17n, smtinterpol, cvc5, MathSATn.
diff --git a/archive/2022/results/qf-ania-proof-exhibition.html b/archive/2022/results/qf-ania-proof-exhibition.html
index 7c346513..52bf9339 100644
--- a/archive/2022/results/qf-ania-proof-exhibition.html
+++ b/archive/2022/results/qf-ania-proof-exhibition.html
[QF_ANIA (Proof Exhibition Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-ania-single-query.html b/archive/2022/results/qf-ania-single-query.html
index b54c85d1..ad74541e 100644
--- a/archive/2022/results/qf-ania-single-query.html
+++ b/archive/2022/results/qf-ania-single-query.html
[QF_ANIA (Single Query Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-ania-unsat-core.html b/archive/2022/results/qf-ania-unsat-core.html
index de12d818..09ac4b9b 100644
--- a/archive/2022/results/qf-ania-unsat-core.html
+++ b/archive/2022/results/qf-ania-unsat-core.html
[QF_ANIA (Unsat Core Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-aufbv-incremental.html b/archive/2022/results/qf-aufbv-incremental.html
index 649b8b11..8d02637a 100644
--- a/archive/2022/results/qf-aufbv-incremental.html
+++ b/archive/2022/results/qf-aufbv-incremental.html
[QF_AUFBV (Incremental Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-aufbv-proof-exhibition.html b/archive/2022/results/qf-aufbv-proof-exhibition.html
index 6d8aac48..35f95b68 100644
--- a/archive/2022/results/qf-aufbv-proof-exhibition.html
+++ b/archive/2022/results/qf-aufbv-proof-exhibition.html
[QF_AUFBV (Proof Exhibition Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-aufbv-single-query.html b/archive/2022/results/qf-aufbv-single-query.html
index 1c542fc0..dbe87688 100644
--- a/archive/2022/results/qf-aufbv-single-query.html
+++ b/archive/2022/results/qf-aufbv-single-query.html
[QF_AUFBV (Single Query Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-aufbv-unsat-core.html b/archive/2022/results/qf-aufbv-unsat-core.html
index da318726..1af4f38d 100644
--- a/archive/2022/results/qf-aufbv-unsat-core.html
+++ b/archive/2022/results/qf-aufbv-unsat-core.html
[QF_AUFBV (Unsat Core Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-aufbvfp-single-query.html b/archive/2022/results/qf-aufbvfp-single-query.html
index 658c2470..cfbaf6ad 100644
--- a/archive/2022/results/qf-aufbvfp-single-query.html
+++ b/archive/2022/results/qf-aufbvfp-single-query.html
[QF_AUFBVFP (Single Query Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-auflia-incremental.html b/archive/2022/results/qf-auflia-incremental.html
index cfb944f1..6784e9ed 100644
--- a/archive/2022/results/qf-auflia-incremental.html
+++ b/archive/2022/results/qf-auflia-incremental.html
[QF_AUFLIA (Incremental Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-auflia-proof-exhibition.html b/archive/2022/results/qf-auflia-proof-exhibition.html
index 1d7f6d88..a3bb8e2d 100644
--- a/archive/2022/results/qf-auflia-proof-exhibition.html
+++ b/archive/2022/results/qf-auflia-proof-exhibition.html
[QF_AUFLIA (Proof Exhibition Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-auflia-single-query.html b/archive/2022/results/qf-auflia-single-query.html
index d8761610..e3e93899 100644
--- a/archive/2022/results/qf-auflia-single-query.html
+++ b/archive/2022/results/qf-auflia-single-query.html
[QF_AUFLIA (Single Query Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-auflia-unsat-core.html b/archive/2022/results/qf-auflia-unsat-core.html
index 31ce3a4c..b1e07864 100644
--- a/archive/2022/results/qf-auflia-unsat-core.html
+++ b/archive/2022/results/qf-auflia-unsat-core.html
[QF_AUFLIA (Unsat Core Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-aufnia-proof-exhibition.html b/archive/2022/results/qf-aufnia-proof-exhibition.html
index 6bc9b17c..a4af9abe 100644
--- a/archive/2022/results/qf-aufnia-proof-exhibition.html
+++ b/archive/2022/results/qf-aufnia-proof-exhibition.html
[QF_AUFNIA (Proof Exhibition Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-aufnia-single-query.html b/archive/2022/results/qf-aufnia-single-query.html
index f5d91f16..b7c29aa0 100644
--- a/archive/2022/results/qf-aufnia-single-query.html
+++ b/archive/2022/results/qf-aufnia-single-query.html
[QF_AUFNIA (Single Query Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-aufnia-unsat-core.html b/archive/2022/results/qf-aufnia-unsat-core.html
index 038f4611..f950b094 100644
--- a/archive/2022/results/qf-aufnia-unsat-core.html
+++ b/archive/2022/results/qf-aufnia-unsat-core.html
[QF_AUFNIA (Unsat Core Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-ax-proof-exhibition.html b/archive/2022/results/qf-ax-proof-exhibition.html
index 308bd2e4..70951350 100644
--- a/archive/2022/results/qf-ax-proof-exhibition.html
+++ b/archive/2022/results/qf-ax-proof-exhibition.html
[QF_AX (Proof Exhibition Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-ax-single-query.html b/archive/2022/results/qf-ax-single-query.html
index eaca5e7a..2cca3558 100644
--- a/archive/2022/results/qf-ax-single-query.html
+++ b/archive/2022/results/qf-ax-single-query.html
[QF_AX (Single Query Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-ax-unsat-core.html b/archive/2022/results/qf-ax-unsat-core.html
index 0e8471c0..503ee6af 100644
--- a/archive/2022/results/qf-ax-unsat-core.html
+++ b/archive/2022/results/qf-ax-unsat-core.html
[QF_AX (Unsat Core Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-bitvec-incremental.html b/archive/2022/results/qf-bitvec-incremental.html
index 6adeae6a..02ba3be7 100644
--- a/archive/2022/results/qf-bitvec-incremental.html
+++ b/archive/2022/results/qf-bitvec-incremental.html
[QF_Bitvec (Incremental Track) division results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-bitvec-model-validation.html b/archive/2022/results/qf-bitvec-model-validation.html
index 70bb4522..8e6b054d 100644
--- a/archive/2022/results/qf-bitvec-model-validation.html
+++ b/archive/2022/results/qf-bitvec-model-validation.html
[QF_Bitvec (Model Validation Track) division results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-bitvec-proof-exhibition.html b/archive/2022/results/qf-bitvec-proof-exhibition.html
index e9e71fe2..09c33d2c 100644
--- a/archive/2022/results/qf-bitvec-proof-exhibition.html
+++ b/archive/2022/results/qf-bitvec-proof-exhibition.html
[QF_Bitvec (Proof Exhibition Track) division results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-bitvec-single-query.html b/archive/2022/results/qf-bitvec-single-query.html
index c06eb915..dc0b7359 100644
--- a/archive/2022/results/qf-bitvec-single-query.html
+++ b/archive/2022/results/qf-bitvec-single-query.html
[QF_Bitvec (Single Query Track) division results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-bitvec-unsat-core.html b/archive/2022/results/qf-bitvec-unsat-core.html
index bf336b68..145c929e 100644
--- a/archive/2022/results/qf-bitvec-unsat-core.html
+++ b/archive/2022/results/qf-bitvec-unsat-core.html
[QF_Bitvec (Unsat Core Track) division results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-bv-incremental.html b/archive/2022/results/qf-bv-incremental.html
index a33dc315..019cad88 100644
--- a/archive/2022/results/qf-bv-incremental.html
+++ b/archive/2022/results/qf-bv-incremental.html
[QF_BV (Incremental Track) results page: single-line markup hunks across the header, navigation, and score tables; no recoverable text changes.]
diff --git a/archive/2022/results/qf-bv-model-validation.html b/archive/2022/results/qf-bv-model-validation.html
index 849924ee..fd5a673a 100644
[hunks touch only HTML markup; recoverable page text:]
QF_BV (Model Validation Track): competition results for the QF_BV logic in the Model Validation Track.
Winner (Sequential and Parallel Performance): Bitwuzla
Ranking: 2020-Bitwuzlan 7274, Bitwuzla 7272, Yices2 7246, STP 7213, z3-4.8.17n 7009, cvc5 6958, MathSATn 6729, Z3++BV 7068 (9 errors)

diff --git a/archive/2022/results/qf-bv-proof-exhibition.html b/archive/2022/results/qf-bv-proof-exhibition.html
index 889a8212..0c9213c2 100644
[hunks touch only HTML markup; recoverable page text:]
QF_BV (Proof Exhibition Track): competition results for the QF_BV logic in the Proof Exhibition Track.
Ranking: cvc5-lfsc 6516, cvc5 4140

diff --git a/archive/2022/results/qf-bv-single-query.html b/archive/2022/results/qf-bv-single-query.html
index 420724fa..03662fa2 100644
[hunks touch only HTML markup; recoverable page text:]
QF_BV (Single Query Track): competition results for the QF_BV logic in the Single Query Track.
Winners: Bitwuzla (Sequential, Parallel, SAT); STP (UNSAT, 24s)
Ranking (solved): STP-fixedn 8283, 2020-Bitwuzla-fixedn 8263, Bitwuzla 8257, Yices2 8203, STP 8196, cvc5 7707, Z3++BV 7452, MathSATn 7230, z3-4.8.17n 7203
SAT ranking: STP-fixedn 3081, 2020-Bitwuzla-fixedn 3065, Bitwuzla 3063, Yices2 3032, STP 2992, Z3++BV 2967, cvc5 2929, z3-4.8.17n 2808, MathSATn 2619
UNSAT ranking: STP 5204, STP-fixedn 5202, 2020-Bitwuzla-fixedn 5198, Bitwuzla 5194, Yices2 5170, cvc5 4778, MathSATn 4611, Z3++BV 4485, z3-4.8.17n 4395
24s ranking: STP-fixedn 7782, STP 7627, Bitwuzla 7573, 2020-Bitwuzla-fixedn 7553, Yices2 7480, Z3++BV 6083, cvc5 5955, MathSATn 5716, z3-4.8.17n 5580

diff --git a/archive/2022/results/qf-bv-unsat-core.html b/archive/2022/results/qf-bv-unsat-core.html
index 677b7aa8..39511707 100644
[hunks touch only HTML markup; recoverable page text:]
QF_BV (Unsat Core Track): competition results for the QF_BV logic in the Unsat Core Track.
Winner (Sequential and Parallel Performance): Bitwuzla
Ranking: Bitwuzla 2301687, 2020-Bitwuzla-fixedn 2217052, Yices2 1905566, z3-4.8.17n 1633136, cvc5 422993, MathSATn 0

diff --git a/archive/2022/results/qf-bvfp-incremental.html b/archive/2022/results/qf-bvfp-incremental.html
index 9a9ae6c6..1a809eb1 100644
[hunks touch only HTML markup; recoverable page text:]
QF_BVFP (Incremental Track): competition results for the QF_BVFP logic in the Incremental Track.
Winner (Parallel Performance): Bitwuzla
Ranking (solved): Bitwuzla 2001, 2021-Bitwuzla-fixedn 2000, cvc5 1995, MathSATn 1992, z3-4.8.17n 1959

diff --git a/archive/2022/results/qf-bvfp-model-validation.html b/archive/2022/results/qf-bvfp-model-validation.html
index 55dcf7f7..082e75a6 100644
[hunks touch only HTML markup; recoverable page text:]
QF_BVFP (Model Validation Track): competition results for the QF_BVFP logic in the Model Validation Track.
Winner (Sequential and Parallel Performance): Bitwuzla
Ranking: Bitwuzla 7027, z3-4.8.17n 7026, MathSATn 7020, cvc5 7020

diff --git a/archive/2022/results/qf-bvfp-proof-exhibition.html b/archive/2022/results/qf-bvfp-proof-exhibition.html
index 23f8c076..0efbb3b9 100644
[hunks touch only HTML markup; recoverable page text:]
QF_BVFP (Proof Exhibition Track): competition results for the QF_BVFP logic in the Proof Exhibition Track.
Ranking: cvc5-lfsc 282, cvc5 262

diff --git a/archive/2022/results/qf-bvfp-single-query.html b/archive/2022/results/qf-bvfp-single-query.html
index 1f1be85f..8bcfa7f4 100644
[hunks touch only HTML markup; recoverable page text:]
QF_BVFP (Single Query Track): competition results for the QF_BVFP logic in the Single Query Track.
Winners: Bitwuzla (Sequential, Parallel, SAT, UNSAT, 24s)
Ranking (solved): Bitwuzla 465, 2021-cvc5n 463, cvc5 463, MathSATn 457, z3-4.8.17n 455, COLIBRI 410
SAT ranking: Bitwuzla 195, MathSATn 195, 2021-cvc5n 195, cvc5 195, z3-4.8.17n 195, COLIBRI 189
UNSAT ranking: Bitwuzla 270, 2021-cvc5n 268, cvc5 268, MathSATn 262, z3-4.8.17n 260, COLIBRI 221
24s ranking: Bitwuzla 451, 2021-cvc5n 444, cvc5 437, MathSATn 432, z3-4.8.17n 411, COLIBRI 400

diff --git a/archive/2022/results/qf-bvfp-unsat-core.html b/archive/2022/results/qf-bvfp-unsat-core.html
index 11c26a33..20d703a1 100644
[hunks touch only HTML markup; recoverable page text:]
QF_BVFP (Unsat Core Track): competition results for the QF_BVFP logic in the Unsat Core Track.
Winner (Sequential and Parallel Performance): Bitwuzla
Ranking: Bitwuzla 14229, 2021-Bitwuzlan 14183, z3-4.8.17n 13913, cvc5 11601, MathSATn 125

diff --git a/archive/2022/results/qf-bvfplra-incremental.html b/archive/2022/results/qf-bvfplra-incremental.html
index 628d89aa..c65e2045 100644
[hunks touch only HTML markup; recoverable page text:]
QF_BVFPLRA (Incremental Track): competition results for the QF_BVFPLRA logic in the Incremental Track.
Winner (Parallel Performance): cvc5
Ranking (solved, all 32736 queries): MathSATn, cvc5, Bitwuzla

diff --git a/archive/2022/results/qf-bvfplra-model-validation.html b/archive/2022/results/qf-bvfplra-model-validation.html
index b4ab5739..ec41f6cd 100644
[hunks touch only HTML markup; recoverable page text:]
QF_BVFPLRA (Model Validation Track): competition results for the QF_BVFPLRA logic in the Model Validation Track.
Winner (Sequential and Parallel Performance): Bitwuzla
Ranking: Bitwuzla 113, z3-4.8.17n 110, cvc5 105, MathSATn 76

diff --git a/archive/2022/results/qf-bvfplra-proof-exhibition.html b/archive/2022/results/qf-bvfplra-proof-exhibition.html
index cba73869..e4b2bcb8 100644
[hunks touch only HTML markup; recoverable page text:]
QF_BVFPLRA (Proof Exhibition Track): competition results for the QF_BVFPLRA logic in the Proof Exhibition Track.
Ranking: cvc5-lfsc 16, cvc5 13

diff --git a/archive/2022/results/qf-bvfplra-single-query.html b/archive/2022/results/qf-bvfplra-single-query.html
index 60f8a6a6..28bb5e8e 100644
[hunks touch only HTML markup; recoverable page text:]
QF_BVFPLRA (Single Query Track): competition results for the QF_BVFPLRA logic in the Single Query Track.
Winners: Bitwuzla (Sequential, Parallel, SAT, 24s); cvc5 (UNSAT)
Ranking (solved): Bitwuzla 62, cvc5 62, 2021-cvc5n 61, z3-4.8.17n 58, MathSATn 56, COLIBRI 49
SAT ranking: Bitwuzla 36, z3-4.8.17n 35, 2021-cvc5n 35, cvc5 35, MathSATn 35, COLIBRI 25
UNSAT ranking: cvc5 27, Bitwuzla 26, 2021-cvc5n 26, COLIBRI 24, z3-4.8.17n 23, MathSATn 21
24s ranking: Bitwuzla 51, COLIBRI 49, 2021-cvc5n 46, cvc5 42, MathSATn 41, z3-4.8.17n 40

diff --git a/archive/2022/results/qf-bvfplra-unsat-core.html b/archive/2022/results/qf-bvfplra-unsat-core.html
index bcc7be62..6af21643 100644
[hunks touch only HTML markup; recoverable page text:]
QF_BVFPLRA (Unsat Core Track): competition results for the QF_BVFPLRA logic in the Unsat Core Track.
Winner (Sequential and Parallel Performance): Bitwuzla
Ranking: Bitwuzla 329, 2021-Bitwuzlan 319, z3-4.8.17n 231, cvc5 53, MathSATn 0

diff --git a/archive/2022/results/qf-datatypes-proof-exhibition.html b/archive/2022/results/qf-datatypes-proof-exhibition.html
index f29d769a..eef842b2 100644
[hunks touch only HTML markup; recoverable page text:]
QF_Datatypes (Proof Exhibition Track): competition results for the QF_Datatypes division in the Proof Exhibition Track.
Ranking: smtinterpol 2223, cvc5-lfsc 2214, cvc5 2209

diff --git a/archive/2022/results/qf-datatypes-single-query.html b/archive/2022/results/qf-datatypes-single-query.html
index 55fc2664..150bfcd5 100644
[hunks touch only HTML markup; recoverable page text:]
QF_Datatypes (Single Query Track): competition results for the QF_Datatypes division in the Single Query Track.
Winners: cvc5 (Sequential, Parallel, SAT, UNSAT); smtinterpol (24s)
Ranking (solved): z3-4.8.17n 207, 2021-z3n 205, cvc5 190, smtinterpol 17
SAT ranking: cvc5 51, z3-4.8.17n 48, 2021-z3n 47, smtinterpol 0
UNSAT ranking: z3-4.8.17n 159, 2021-z3n 158, cvc5 139, smtinterpol 27
24s ranking: z3-4.8.17n 4, 2021-z3n 4, smtinterpol 1, cvc5 0

diff --git a/archive/2022/results/qf-datatypes-unsat-core.html b/archive/2022/results/qf-datatypes-unsat-core.html
index 0322ac3c..87d4d461 100644
[hunks touch only HTML markup; recoverable page text:]
QF_Datatypes (Unsat Core Track): competition results for the QF_Datatypes division in the Unsat Core Track.
Winner (Sequential and Parallel Performance): cvc5
Ranking: z3-4.8.17n 854021, 2021-z3n 840410, cvc5 189461, smtinterpol 39537

diff --git a/archive/2022/results/qf-dt-proof-exhibition.html b/archive/2022/results/qf-dt-proof-exhibition.html
index c8dbc2d2..3af698d4 100644
[hunks touch only HTML markup; recoverable page text:]
QF_DT (Proof Exhibition Track): competition results for the QF_DT logic in the Proof Exhibition Track.
Ranking: cvc5-lfsc 2214, smtinterpol 2210, cvc5 2209

diff --git a/archive/2022/results/qf-dt-single-query.html b/archive/2022/results/qf-dt-single-query.html
index 419f47b4..9e4eb779 100644
[hunks touch only HTML markup; recoverable page text:]
QF_DT (Single Query Track): competition results for the QF_DT logic in the Single Query Track.
Winners: cvc5 (Sequential, Parallel, SAT, UNSAT); smtinterpol (24s)
Ranking (solved): z3-4.8.17n 106, 2021-z3n 104, cvc5 85, smtinterpol 7
SAT ranking: z3-4.8.17n 41, 2021-z3n 40, cvc5 21, smtinterpol 0
UNSAT ranking: z3-4.8.17n 65, 2021-z3n 64, cvc5 64, smtinterpol 14
24s ranking: z3-4.8.17n 4, 2021-z3n 4, smtinterpol 1, cvc5 0

diff --git a/archive/2022/results/qf-dt-unsat-core.html b/archive/2022/results/qf-dt-unsat-core.html
index e61dba9c..34a7d52a 100644
[hunks touch only HTML markup; recoverable page text:]
QF_DT (Unsat Core Track): competition results for the QF_DT logic in the Unsat Core Track.
Winner (Sequential and Parallel Performance): cvc5
Ranking: 2021-z3n 186395, z3-4.8.17n 186395, cvc5 76023, smtinterpol 20833

diff --git a/archive/2022/results/qf-equality-bitvec-incremental.html b/archive/2022/results/qf-equality-bitvec-incremental.html
index b0d4280a..cad9c7ee 100644
[hunks touch only HTML markup; recoverable page text:]
QF_Equality+Bitvec (Incremental Track): competition results for the QF_Equality+Bitvec division in the Incremental Track.
Winner (Parallel Performance): Yices2
Ranking (solved): Yices2 5002, 2020-Yices2 incrementaln 5001, z3-4.8.17n 4754, MathSATn 4732, cvc5 4199, Bitwuzla 4181

diff --git a/archive/2022/results/qf-equality-bitvec-model-validation.html b/archive/2022/results/qf-equality-bitvec-model-validation.html
index a30972fb..68d3e528 100644
[hunks touch only HTML markup; recoverable page text:]
QF_Equality+Bitvec (Model Validation Track): competition results for the QF_Equality+Bitvec division in the Model Validation Track.
Winner (Sequential and Parallel Performance): Bitwuzla
Ranking: Bitwuzla 373, 2021-Yices2 model-validationn 373, Yices2 373, z3-4.8.17n 363, cvc5 362, MathSATn 359

diff --git a/archive/2022/results/qf-equality-bitvec-proof-exhibition.html b/archive/2022/results/qf-equality-bitvec-proof-exhibition.html
index 78813625..d9c51611 100644
[hunks touch only HTML markup; recoverable page text:]
QF_Equality+Bitvec (Proof Exhibition Track): competition results for the QF_Equality+Bitvec division in the Proof Exhibition Track.
Ranking: cvc5-lfsc 2305, cvc5 1614

diff --git a/archive/2022/results/qf-equality-bitvec-single-query.html b/archive/2022/results/qf-equality-bitvec-single-query.html
index bd496330..e38012ec 100644
[hunks touch only HTML markup; recoverable page text:]
QF_Equality+Bitvec (Single Query Track): competition results for the QF_Equality+Bitvec division in the Single Query Track.
Winners: Bitwuzla (Sequential, Parallel, SAT, UNSAT, 24s)
Ranking (solved): Bitwuzla 2116, 2020-Bitwuzlan 2108, Yices2 2078, z3-4.8.17n 2017, cvc5 2014, MathSATn 2011
SAT ranking: 2020-Bitwuzlan 1317, Bitwuzla 1317, Yices2 1312, MathSATn 1260, cvc5 1259, z3-4.8.17n 1253
UNSAT ranking: Bitwuzla 799, 2020-Bitwuzlan 791, Yices2 766, z3-4.8.17n 764, cvc5 755, MathSATn 751
24s ranking: Bitwuzla 1955, 2020-Bitwuzlan 1947, Yices2 1939, MathSATn 1836, z3-4.8.17n 1813, cvc5 1811

diff --git a/archive/2022/results/qf-equality-bitvec-unsat-core.html b/archive/2022/results/qf-equality-bitvec-unsat-core.html
index 2c24bc01..3e68892a 100644
[hunks touch only HTML markup; recoverable page text:]
QF_Equality+Bitvec (Unsat Core Track): competition results for the QF_Equality+Bitvec division in the Unsat Core Track.
Winner (Sequential and Parallel Performance): Bitwuzla
Ranking: Bitwuzla 1183140, 2021-Bitwuzlan 1177598, Yices2 901667, z3-4.8.17n 897338, cvc5 141993, MathSATn 73

    - + - diff --git a/archive/2022/results/qf-equality-incremental.html b/archive/2022/results/qf-equality-incremental.html index bbc7443b..7809a16d 100644 --- a/archive/2022/results/qf-equality-incremental.html +++ b/archive/2022/results/qf-equality-incremental.html @@ -35,7 +35,7 @@

    QF_Equality (Incremental Track)

    Competition results for the QF_Equality division in the Incremental Track.

    Winner: Yices2 (parallel performance).
    Entrants: Yices2, z3-4.8.17n, 2021-z3n, cvc5, smtinterpol, OpenSMT, MathSATn.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and the per-solver result table; flattened score and time columns omitted.]

diff --git a/archive/2022/results/qf-equality-lineararith-cloud.html b/archive/2022/results/qf-equality-lineararith-cloud.html
index 0f9934be..a33b222a 100644
--- a/archive/2022/results/qf-equality-lineararith-cloud.html
+++ b/archive/2022/results/qf-equality-lineararith-cloud.html

    QF_Equality+LinearArith (Cloud Track)

    Competition results for the QF_Equality+LinearArith division in the Cloud Track.

    Entrants: SMTS portfolio, SMTS cube-and-conquer, SMTS cube-and-conquer (fixed), cvc5-cloud.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and the per-solver result tables; flattened score and time columns omitted.]

diff --git a/archive/2022/results/qf-equality-lineararith-incremental.html b/archive/2022/results/qf-equality-lineararith-incremental.html
index 68e0ddcd..f7252ac9 100644
--- a/archive/2022/results/qf-equality-lineararith-incremental.html
+++ b/archive/2022/results/qf-equality-lineararith-incremental.html

    QF_Equality+LinearArith (Incremental Track)

    Competition results for the QF_Equality+LinearArith division in the Incremental Track.

    Winner: smtinterpol (parallel performance).
    Entrants: 2020-z3n, z3-4.8.17n, smtinterpol, cvc5, Yices2, MathSATn, OpenSMT.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and the per-solver result table; flattened score and time columns omitted.]

diff --git a/archive/2022/results/qf-equality-lineararith-model-validation.html b/archive/2022/results/qf-equality-lineararith-model-validation.html
index 05eaba3a..1c966f3f 100644
--- a/archive/2022/results/qf-equality-lineararith-model-validation.html
+++ b/archive/2022/results/qf-equality-lineararith-model-validation.html

    QF_Equality+LinearArith (Model Validation Track)

    Competition results for the QF_Equality+LinearArith division in the Model Validation Track.

    Winner: smtinterpol (sequential and parallel performance).
    Scores (main table): smtinterpol 880, 2021-SMTInterpoln 880, cvc5 853, z3-4.8.17n 852, Yices2 825, OpenSMT 681, MathSATn 538.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and per-solver result tables; flattened time columns omitted.]

diff --git a/archive/2022/results/qf-equality-lineararith-parallel.html b/archive/2022/results/qf-equality-lineararith-parallel.html
index 5be68f34..1694314c 100644
--- a/archive/2022/results/qf-equality-lineararith-parallel.html
+++ b/archive/2022/results/qf-equality-lineararith-parallel.html

    QF_Equality+LinearArith (Parallel Track)

    Competition results for the QF_Equality+LinearArith division in the Parallel Track.

    Entrants: SMTS portfolio, SMTS cube-and-conquer (fixed), SMTS cube-and-conquer.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and the per-solver result tables; flattened score and time columns omitted.]

diff --git a/archive/2022/results/qf-equality-lineararith-proof-exhibition.html b/archive/2022/results/qf-equality-lineararith-proof-exhibition.html
index 88e99752..af890851 100644
--- a/archive/2022/results/qf-equality-lineararith-proof-exhibition.html
+++ b/archive/2022/results/qf-equality-lineararith-proof-exhibition.html

    QF_Equality+LinearArith (Proof Exhibition Track)

    Competition results for the QF_Equality+LinearArith division in the Proof Exhibition Track.

    Scores (main table): smtinterpol 1213, cvc5-lfsc 1029, veriT 610, cvc5 423.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and per-solver result tables; flattened time columns omitted.]

diff --git a/archive/2022/results/qf-equality-lineararith-single-query.html b/archive/2022/results/qf-equality-lineararith-single-query.html
index 875f2d4b..4c18407b 100644
--- a/archive/2022/results/qf-equality-lineararith-single-query.html
+++ b/archive/2022/results/qf-equality-lineararith-single-query.html

    QF_Equality+LinearArith (Single Query Track)

    Competition results for the QF_Equality+LinearArith division in the Single Query Track.

    Winners: smtinterpol (sequential, parallel, and SAT performance), Yices2 (UNSAT and 24s performance).
    Scores (main table): z3-4.8.17n 1755, 2021-SMTInterpoln 1742, smtinterpol 1739, Yices2 1737, cvc5 1717, MathSATn 1714, veriT 1047, OpenSMT 834.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and the sequential/parallel/SAT/UNSAT/24s per-solver tables; flattened time columns omitted.]

diff --git a/archive/2022/results/qf-equality-lineararith-unsat-core.html b/archive/2022/results/qf-equality-lineararith-unsat-core.html
index 807cbd3d..8ff7257c 100644
--- a/archive/2022/results/qf-equality-lineararith-unsat-core.html
+++ b/archive/2022/results/qf-equality-lineararith-unsat-core.html

    QF_Equality+LinearArith (Unsat Core Track)

    Competition results for the QF_Equality+LinearArith division in the Unsat Core Track.

    Winner: Yices2 (sequential and parallel performance).
    Scores (main table): Yices2 821341, MathSATn 800757, 2021-MathSAT5n 781706, z3-4.8.17n 468939, smtinterpol 187293, cvc5 54592.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and per-solver result tables; flattened time columns omitted.]

diff --git a/archive/2022/results/qf-equality-model-validation.html b/archive/2022/results/qf-equality-model-validation.html
index b68b934b..8fb83672 100644
--- a/archive/2022/results/qf-equality-model-validation.html
+++ b/archive/2022/results/qf-equality-model-validation.html

    QF_Equality (Model Validation Track)

    Competition results for the QF_Equality division in the Model Validation Track.

    Winner: Yices2 (sequential and parallel performance).
    Scores (main table): Yices2 1571, 2021-Yices2 model-validationn 1571, z3-4.8.17n 1571, cvc5 1571, smtinterpol 1571, OpenSMT 1551, MathSATn 636.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and per-solver result tables; flattened time columns omitted.]

diff --git a/archive/2022/results/qf-equality-nonlineararith-incremental.html b/archive/2022/results/qf-equality-nonlineararith-incremental.html
index fae68554..fbf28e13 100644
--- a/archive/2022/results/qf-equality-nonlineararith-incremental.html
+++ b/archive/2022/results/qf-equality-nonlineararith-incremental.html

    QF_Equality+NonLinearArith (Incremental Track)

    Competition results for the QF_Equality+NonLinearArith division in the Incremental Track.

    Winner: smtinterpol (parallel performance).
    Entrants: z3-4.8.17n, 2021-z3n, smtinterpol, cvc5, MathSATn, Yices2.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and the per-solver result table; flattened score and time columns omitted.]

diff --git a/archive/2022/results/qf-equality-nonlineararith-proof-exhibition.html b/archive/2022/results/qf-equality-nonlineararith-proof-exhibition.html
index 42ba49ce..47966f24 100644
--- a/archive/2022/results/qf-equality-nonlineararith-proof-exhibition.html
+++ b/archive/2022/results/qf-equality-nonlineararith-proof-exhibition.html

    QF_Equality+NonLinearArith (Proof Exhibition Track)

    Competition results for the QF_Equality+NonLinearArith division in the Proof Exhibition Track.

    Scores (main table): cvc5-lfsc 225, cvc5 193, smtinterpol 102.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and per-solver result tables; flattened time columns omitted.]

diff --git a/archive/2022/results/qf-equality-nonlineararith-single-query.html b/archive/2022/results/qf-equality-nonlineararith-single-query.html
index 6db2b36a..a0cbb136 100644
--- a/archive/2022/results/qf-equality-nonlineararith-single-query.html
+++ b/archive/2022/results/qf-equality-nonlineararith-single-query.html

    QF_Equality+NonLinearArith (Single Query Track)

    Competition results for the QF_Equality+NonLinearArith division in the Single Query Track.

    Winner: cvc5 (sequential, parallel, SAT, UNSAT, and 24s performance).
    Scores (main table): 2020-CVC4n 326, cvc5 326, z3-4.8.17n 322, MathSATn 260, Yices2 246, smtinterpol 169, veriT+raSAT+Redlog 1.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and the sequential/parallel/SAT/UNSAT/24s per-solver tables; flattened time columns omitted.]

diff --git a/archive/2022/results/qf-equality-nonlineararith-unsat-core.html b/archive/2022/results/qf-equality-nonlineararith-unsat-core.html
index 9d790aaf..ce4a9348 100644
--- a/archive/2022/results/qf-equality-nonlineararith-unsat-core.html
+++ b/archive/2022/results/qf-equality-nonlineararith-unsat-core.html

    QF_Equality+NonLinearArith (Unsat Core Track)

    Competition results for the QF_Equality+NonLinearArith division in the Unsat Core Track.

    Winner: smtinterpol (sequential and parallel performance).
    Scores (main table): 2021-cvc5-ucn 80132, smtinterpol 69752, MathSATn 65187, z3-4.8.17n 37854, cvc5 32873.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and per-solver result tables; flattened time columns omitted.]

diff --git a/archive/2022/results/qf-equality-proof-exhibition.html b/archive/2022/results/qf-equality-proof-exhibition.html
index 9f4e0f63..c54f4fdd 100644
--- a/archive/2022/results/qf-equality-proof-exhibition.html
+++ b/archive/2022/results/qf-equality-proof-exhibition.html

    QF_Equality (Proof Exhibition Track)

    Competition results for the QF_Equality division in the Proof Exhibition Track.

    Scores (main table): smtinterpol 2384, cvc5-lfsc 2380, OpenSMT 2170, veriT 2133, cvc5 236.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and per-solver result tables; flattened time columns omitted.]

diff --git a/archive/2022/results/qf-equality-single-query.html b/archive/2022/results/qf-equality-single-query.html
index 72cb44f3..79fdfb73 100644
--- a/archive/2022/results/qf-equality-single-query.html
+++ b/archive/2022/results/qf-equality-single-query.html

    QF_Equality (Single Query Track)

    Competition results for the QF_Equality division in the Single Query Track.

    Winner: Yices2 (sequential, parallel, SAT, UNSAT, and 24s performance).
    Scores (main table): Yices2 3793, OpenSMT 3792, z3-4.8.17n 3791, 2021-z3n 3791, cvc5 3789, smtinterpol 3709, MathSATn 3701, veriT 3492, OpenSMT-fixedn 300.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and the sequential/parallel/SAT/UNSAT/24s per-solver tables; flattened time columns omitted.]

diff --git a/archive/2022/results/qf-equality-unsat-core.html b/archive/2022/results/qf-equality-unsat-core.html
index 3edec721..68ddb24e 100644
--- a/archive/2022/results/qf-equality-unsat-core.html
+++ b/archive/2022/results/qf-equality-unsat-core.html

    QF_Equality (Unsat Core Track)

    Competition results for the QF_Equality division in the Unsat Core Track.

    Winner: Yices2 (sequential and parallel performance).
    Scores (main table): z3-4.8.17n 287599, 2021-z3n 286818, Yices2 286081, MathSATn 249831, smtinterpol 244920, cvc5 181481.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and per-solver result tables; flattened time columns omitted.]

diff --git a/archive/2022/results/qf-fp-incremental.html b/archive/2022/results/qf-fp-incremental.html
index 29c43dbb..c43d44e8 100644
--- a/archive/2022/results/qf-fp-incremental.html
+++ b/archive/2022/results/qf-fp-incremental.html

    QF_FP (Incremental Track)

    Competition results for the QF_FP logic in the Incremental Track.

    Winner: Bitwuzla (parallel performance).
    Entrants: Bitwuzla, 2021-Bitwuzla - fixedn, MathSATn, cvc5, z3-4.8.17n.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and the per-solver result table; flattened score and time columns omitted.]

diff --git a/archive/2022/results/qf-fp-model-validation.html b/archive/2022/results/qf-fp-model-validation.html
index c3ea0b9c..2cd3baaa 100644
--- a/archive/2022/results/qf-fp-model-validation.html
+++ b/archive/2022/results/qf-fp-model-validation.html

    QF_FP (Model Validation Track)

    Competition results for the QF_FP logic in the Model Validation Track.

    Winner: Bitwuzla (sequential and parallel performance).
    Scores (main table): Bitwuzla 10103, cvc5 10101, z3-4.8.17n 10076, MathSATn 8673.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and per-solver result tables; flattened time columns omitted.]

diff --git a/archive/2022/results/qf-fp-proof-exhibition.html b/archive/2022/results/qf-fp-proof-exhibition.html
index bf417018..28c26b75 100644
--- a/archive/2022/results/qf-fp-proof-exhibition.html
+++ b/archive/2022/results/qf-fp-proof-exhibition.html

    QF_FP (Proof Exhibition Track)

    Competition results for the QF_FP logic in the Proof Exhibition Track.

    Scores (main table): cvc5-lfsc 10015, cvc5 10013.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and per-solver result tables; flattened time columns omitted.]

diff --git a/archive/2022/results/qf-fp-single-query.html b/archive/2022/results/qf-fp-single-query.html
index 6ba658d3..2b826493 100644
--- a/archive/2022/results/qf-fp-single-query.html
+++ b/archive/2022/results/qf-fp-single-query.html

    QF_FP (Single Query Track)

    Competition results for the QF_FP logic in the Single Query Track.

    Winners: Bitwuzla (sequential, parallel, and SAT performance), COLIBRI (UNSAT and 24s performance).
    Scores (main table): Bitwuzla 249, cvc5 240, COLIBRI 236, 2021-cvc5n 233, MathSATn 217, z3-4.8.17n 185.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and the sequential/parallel/SAT/UNSAT/24s per-solver tables; flattened time columns omitted.]

diff --git a/archive/2022/results/qf-fp-unsat-core.html b/archive/2022/results/qf-fp-unsat-core.html
index cd5ec2e0..1e58fb3e 100644
--- a/archive/2022/results/qf-fp-unsat-core.html
+++ b/archive/2022/results/qf-fp-unsat-core.html

    QF_FP (Unsat Core Track)

    Competition results for the QF_FP logic in the Unsat Core Track.

    Winner: Bitwuzla (sequential and parallel performance).
    Scores (main table): Bitwuzla 154, 2021-Bitwuzlan 139, z3-4.8.17n 27, cvc5 1, MathSATn 0.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and per-solver result tables; flattened time columns omitted.]

diff --git a/archive/2022/results/qf-fparith-incremental.html b/archive/2022/results/qf-fparith-incremental.html
index 60a7dd0a..42d3a1be 100644
--- a/archive/2022/results/qf-fparith-incremental.html
+++ b/archive/2022/results/qf-fparith-incremental.html

    QF_FPArith (Incremental Track)

    Competition results for the QF_FPArith division in the Incremental Track.

    Winner: Bitwuzla (parallel performance).
    Entrants: Bitwuzla, MathSATn, cvc5, 2021-Bitwuzla - fixedn, z3-4.8.17n.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and the per-solver result table; flattened score and time columns omitted.]

diff --git a/archive/2022/results/qf-fparith-model-validation.html b/archive/2022/results/qf-fparith-model-validation.html
index d5e08512..1696aea7 100644
--- a/archive/2022/results/qf-fparith-model-validation.html
+++ b/archive/2022/results/qf-fparith-model-validation.html

    QF_FPArith (Model Validation Track)

    Competition results for the QF_FPArith division in the Model Validation Track.

    Scores (main table): Bitwuzla 17294, cvc5 17271, z3-4.8.17n 17254, MathSATn 15796.

    [Markup-only hunks (visible text unchanged) over the site header, navigation menu, and per-solver result tables; flattened time columns omitted.]

diff --git a/archive/2022/results/qf-fparith-proof-exhibition.html b/archive/2022/results/qf-fparith-proof-exhibition.html
index d6dd068b..d760da2f 100644
--- a/archive/2022/results/qf-fparith-proof-exhibition.html
+++ b/archive/2022/results/qf-fparith-proof-exhibition.html

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    QF_FPArith (Proof Exhibition Track)

    Competition results for the QF_FPArith - + division - + in the Proof Exhibition Track.

    @@ -119,7 +119,7 @@

    QF_FPArith (Proof Exhibition Track)

    - + cvc5-lfsc 0 10685 @@ -130,7 +130,7 @@

    QF_FPArith (Proof Exhibition Track)

    - + cvc5 0 10628 @@ -152,7 +152,7 @@

    QF_FPArith (Proof Exhibition Track)

    - + cvc5-lfsc 0 106853750696.9663750814.052316303043 @@ -161,7 +161,7 @@

    QF_FPArith (Proof Exhibition Track)

    - + cvc5 0 106283829469.1243829230.797322003104 @@ -185,7 +185,6 @@

    QF_FPArith (Proof Exhibition Track)

diff --git a/archive/2022/results/qf-fparith-single-query.html b/archive/2022/results/qf-fparith-single-query.html
index d86d9f76..6f32d6e2 100644
--- a/archive/2022/results/qf-fparith-single-query.html
+++ b/archive/2022/results/qf-fparith-single-query.html
[markup-only hunks for the QF_FPArith division results page, Single Query Track; visible content unchanged: winner banner (Sequential, Parallel, SAT, UNSAT, and 24s Performance: Bitwuzla) and result rows for Bitwuzla, cvc5, 2021-cvc5n, MathSATn, z3-4.8.17n, COLIBRI]
diff --git a/archive/2022/results/qf-fparith-unsat-core.html b/archive/2022/results/qf-fparith-unsat-core.html
index 7a2b6c20..52e82812 100644
--- a/archive/2022/results/qf-fparith-unsat-core.html
+++ b/archive/2022/results/qf-fparith-unsat-core.html
[markup-only hunks for the QF_FPArith division results page, Unsat Core Track; visible content unchanged: winner banner (Sequential and Parallel Performance: Bitwuzla) and result rows for z3-4.8.17n, Bitwuzla, 2021-Bitwuzlan, cvc5, MathSATn]
diff --git a/archive/2022/results/qf-fplra-incremental.html b/archive/2022/results/qf-fplra-incremental.html
index daaa54c0..088e099c 100644
--- a/archive/2022/results/qf-fplra-incremental.html
+++ b/archive/2022/results/qf-fplra-incremental.html
[markup-only hunks for the QF_FPLRA logic results page, Incremental Track; visible content unchanged: winner banner (Parallel Performance: Bitwuzla) and result rows for Bitwuzla, cvc5, MathSATn, z3-4.8.17n]
diff --git a/archive/2022/results/qf-fplra-model-validation.html b/archive/2022/results/qf-fplra-model-validation.html
index 50b270cb..880ccb0b 100644
--- a/archive/2022/results/qf-fplra-model-validation.html
+++ b/archive/2022/results/qf-fplra-model-validation.html
[markup-only hunks for the QF_FPLRA logic results page, Model Validation Track; visible content unchanged: winner banner (Sequential and Parallel Performance: Bitwuzla) and result rows for Bitwuzla, cvc5, z3-4.8.17n, MathSATn]
diff --git a/archive/2022/results/qf-fplra-proof-exhibition.html b/archive/2022/results/qf-fplra-proof-exhibition.html
index 0b19647c..3ed18044 100644
--- a/archive/2022/results/qf-fplra-proof-exhibition.html
+++ b/archive/2022/results/qf-fplra-proof-exhibition.html
[markup-only hunks for the QF_FPLRA logic results page, Proof Exhibition Track; visible content unchanged: result rows for cvc5-lfsc and cvc5]
diff --git a/archive/2022/results/qf-fplra-single-query.html b/archive/2022/results/qf-fplra-single-query.html
index bbd759a2..53cd1853 100644
--- a/archive/2022/results/qf-fplra-single-query.html
+++ b/archive/2022/results/qf-fplra-single-query.html
[markup-only hunks for the QF_FPLRA logic results page, Single Query Track; visible content unchanged: winner banner (Sequential, Parallel, UNSAT, and 24s Performance: COLIBRI; SAT Performance: Bitwuzla) and result rows for COLIBRI, Bitwuzla, cvc5, 2021-cvc5n, MathSATn, z3-4.8.17n]
diff --git a/archive/2022/results/qf-idl-cloud.html b/archive/2022/results/qf-idl-cloud.html
index a5131c26..d4d995d8 100644
--- a/archive/2022/results/qf-idl-cloud.html
+++ b/archive/2022/results/qf-idl-cloud.html
[markup-only hunks for the QF_IDL logic results page, Cloud Track; visible content unchanged: result rows for cvc5-cloud, SMTS cube-and-conquer (fixed), SMTS cube-and-conquer, SMTS portfolio]
diff --git a/archive/2022/results/qf-idl-model-validation.html b/archive/2022/results/qf-idl-model-validation.html
index 6c6a8377..27689090 100644
--- a/archive/2022/results/qf-idl-model-validation.html
+++ b/archive/2022/results/qf-idl-model-validation.html
[markup-only hunks for the QF_IDL logic results page, Model Validation Track; visible content unchanged: winner banner (Sequential and Parallel Performance: Z3++) and result rows for Z3++, z3-4.8.17n, 2020-z3n, Yices2, OpenSMT, cvc5, smtinterpol, MathSATn]
diff --git a/archive/2022/results/qf-idl-parallel.html b/archive/2022/results/qf-idl-parallel.html
index df4ddf37..b5935c3e 100644
--- a/archive/2022/results/qf-idl-parallel.html
+++ b/archive/2022/results/qf-idl-parallel.html
[markup-only hunks for the QF_IDL logic results page, Parallel Track; visible content unchanged: result rows for SMTS portfolio, SMTS cube-and-conquer, SMTS cube-and-conquer (fixed)]
diff --git a/archive/2022/results/qf-idl-proof-exhibition.html b/archive/2022/results/qf-idl-proof-exhibition.html
index e2d6a7a2..004f03c3 100644
--- a/archive/2022/results/qf-idl-proof-exhibition.html
+++ b/archive/2022/results/qf-idl-proof-exhibition.html
[markup-only hunks for the QF_IDL logic results page, Proof Exhibition Track; visible content unchanged: result rows for veriT, smtinterpol, cvc5-lfsc, cvc5]
diff --git a/archive/2022/results/qf-idl-single-query.html b/archive/2022/results/qf-idl-single-query.html
index 2d432ab9..14b8befd 100644
--- a/archive/2022/results/qf-idl-single-query.html
+++ b/archive/2022/results/qf-idl-single-query.html
[markup-only hunks for the QF_IDL logic results page, Single Query Track; visible content unchanged: winner banner (Sequential, Parallel, SAT, and UNSAT Performance: Z3++; 24s Performance: Yices2) and result rows for Z3++, z3-4.8.17n, 2019-Par4n, Yices2, cvc5, OpenSMT, veriT, MathSATn, smtinterpol]
diff --git a/archive/2022/results/qf-idl-unsat-core.html b/archive/2022/results/qf-idl-unsat-core.html
index 36a4c750..b9f077a5 100644
--- a/archive/2022/results/qf-idl-unsat-core.html
+++ b/archive/2022/results/qf-idl-unsat-core.html
[markup-only hunks for the QF_IDL logic results page, Unsat Core Track; visible content unchanged: winner banner (Sequential and Parallel Performance: cvc5) and result rows for cvc5, 2021-cvc5-ucn, smtinterpol, MathSATn, z3-4.8.17n, Yices2]
diff --git a/archive/2022/results/qf-lia-cloud.html b/archive/2022/results/qf-lia-cloud.html
index ca13967a..821433a6 100644
--- a/archive/2022/results/qf-lia-cloud.html
+++ b/archive/2022/results/qf-lia-cloud.html
[markup-only hunks for the QF_LIA logic results page, Cloud Track; visible content unchanged: result rows for SMTS cube-and-conquer (fixed), SMTS portfolio, SMTS cube-and-conquer, cvc5-cloud]
diff --git a/archive/2022/results/qf-lia-incremental.html b/archive/2022/results/qf-lia-incremental.html
index 9cb24274..b3c92312 100644
--- a/archive/2022/results/qf-lia-incremental.html
+++ b/archive/2022/results/qf-lia-incremental.html
[markup-only hunks for the QF_LIA logic results page, Incremental Track; visible content unchanged: winner banner (Parallel Performance: Yices2) and result rows for 2021-Yices2 incrementaln, Yices2, z3-4.8.17n, MathSATn, smtinterpol, cvc5, OpenSMT]
diff --git a/archive/2022/results/qf-lia-model-validation.html b/archive/2022/results/qf-lia-model-validation.html
index 021f6111..18e966a8 100644
--- a/archive/2022/results/qf-lia-model-validation.html
+++ b/archive/2022/results/qf-lia-model-validation.html
[markup-only hunks for the QF_LIA logic results page, Model Validation Track; visible content unchanged: winner banner (Sequential and Parallel Performance: Z3++) and result rows for Z3++, 2020-z3n, MathSATn, z3-4.8.17n, OpenSMT, cvc5, Yices2, smtinterpol]
diff --git a/archive/2022/results/qf-lia-parallel.html b/archive/2022/results/qf-lia-parallel.html
index 77b4af56..371d0800 100644
--- a/archive/2022/results/qf-lia-parallel.html
+++ b/archive/2022/results/qf-lia-parallel.html
[markup-only hunks for the QF_LIA logic results page, Parallel Track; visible content unchanged: result rows for SMTS cube-and-conquer (fixed), SMTS cube-and-conquer, SMTS portfolio]
diff --git a/archive/2022/results/qf-lia-proof-exhibition.html b/archive/2022/results/qf-lia-proof-exhibition.html
index ada1df0e..85e46afd 100644
--- a/archive/2022/results/qf-lia-proof-exhibition.html
+++ b/archive/2022/results/qf-lia-proof-exhibition.html
[markup-only hunks for the QF_LIA logic results page, Proof Exhibition Track; visible content unchanged: result rows for smtinterpol, OpenSMT, veriT, cvc5-lfsc, cvc5]
diff --git a/archive/2022/results/qf-lia-single-query.html b/archive/2022/results/qf-lia-single-query.html
index 88ae3538..70c2b0ee 100644
--- a/archive/2022/results/qf-lia-single-query.html
+++ b/archive/2022/results/qf-lia-single-query.html
[markup-only hunks for the QF_LIA logic results page, Single Query Track; visible content unchanged: winner banner (Sequential, Parallel, and UNSAT Performance: OpenSMT; SAT Performance: Z3++; 24s Performance: Yices2) and result rows for 2019-Par4n, MathSATn, OpenSMT, cvc5, Yices2, Z3++, smtinterpol, z3-4.8.17n, veriT]
diff --git a/archive/2022/results/qf-lia-unsat-core.html b/archive/2022/results/qf-lia-unsat-core.html
index d2380579..28f5543c 100644
--- a/archive/2022/results/qf-lia-unsat-core.html
+++ b/archive/2022/results/qf-lia-unsat-core.html
[markup-only hunks for the QF_LIA logic results page, Unsat Core Track; visible content unchanged: winner banner (Sequential and Parallel Performance: Yices2) and result rows for Yices2, z3-4.8.17n, MathSATn, 2021-cvc5-ucn, smtinterpol, cvc5]
diff --git a/archive/2022/results/qf-linearintarith-cloud.html b/archive/2022/results/qf-linearintarith-cloud.html
index e2972a2e..d88f1b67 100644
--- a/archive/2022/results/qf-linearintarith-cloud.html
+++ b/archive/2022/results/qf-linearintarith-cloud.html
[markup-only hunks for the QF_LinearIntArith division results page, Cloud Track; visible content unchanged: result rows for SMTS cube-and-conquer (fixed), SMTS portfolio, cvc5-cloud, SMTS cube-and-conquer]
diff --git a/archive/2022/results/qf-linearintarith-incremental.html b/archive/2022/results/qf-linearintarith-incremental.html
index 3cf682a1..8988e4bc 100644
--- a/archive/2022/results/qf-linearintarith-incremental.html
+++ b/archive/2022/results/qf-linearintarith-incremental.html
[markup-only hunks for the QF_LinearIntArith division results page, Incremental Track; visible content unchanged: winner banner (Parallel Performance: Yices2) and result rows for 2021-Yices2 incrementaln, Yices2, z3-4.8.17n, MathSATn, smtinterpol, cvc5, OpenSMT]
diff --git a/archive/2022/results/qf-linearintarith-model-validation.html b/archive/2022/results/qf-linearintarith-model-validation.html
index b0e6cba4..8a2020f7 100644
--- a/archive/2022/results/qf-linearintarith-model-validation.html
+++ b/archive/2022/results/qf-linearintarith-model-validation.html
[markup-only hunks for the QF_LinearIntArith division results page, Model Validation Track; visible content unchanged: winner banner (Sequential and Parallel Performance: Z3++) and result rows for Z3++, 2020-z3n, z3-4.8.17n, OpenSMT, Yices2, cvc5, MathSATn, smtinterpol]
diff --git a/archive/2022/results/qf-linearintarith-parallel.html b/archive/2022/results/qf-linearintarith-parallel.html
index 62516efc..21f0bc97 100644
--- a/archive/2022/results/qf-linearintarith-parallel.html
+++ b/archive/2022/results/qf-linearintarith-parallel.html
[markup-only hunks for the QF_LinearIntArith division results page, Parallel Track; visible content unchanged: result rows for SMTS cube-and-conquer (fixed), SMTS cube-and-conquer, SMTS portfolio]
    - + - diff --git a/archive/2022/results/qf-linearintarith-proof-exhibition.html b/archive/2022/results/qf-linearintarith-proof-exhibition.html index 6549d171..0206f85d 100644 --- a/archive/2022/results/qf-linearintarith-proof-exhibition.html +++ b/archive/2022/results/qf-linearintarith-proof-exhibition.html @@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LinearIntArith division in the Proof Exhibition Track. Scores: smtinterpol 2534, OpenSMT 2023, veriT 1786, cvc5-lfsc 1542, cvc5 899.]

diff --git a/archive/2022/results/qf-linearintarith-single-query.html b/archive/2022/results/qf-linearintarith-single-query.html
index 652f06c9..4e70c58b 100644
--- a/archive/2022/results/qf-linearintarith-single-query.html
+++ b/archive/2022/results/qf-linearintarith-single-query.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LinearIntArith division in the Single Query Track. Winners: OpenSMT (sequential), OpenSMT (parallel), Z3++ (SAT), cvc5 (UNSAT), Yices2 (24s). Solved: 2019-Par4n 7117, OpenSMT 6828, Z3++ 6805, cvc5 6793, Yices2 6768, MathSATn 6705, z3-4.8.17n 6667, smtinterpol 6431, veriT 5004.]

diff --git a/archive/2022/results/qf-linearintarith-unsat-core.html b/archive/2022/results/qf-linearintarith-unsat-core.html
index 5fcdd7a8..e3372889 100644
--- a/archive/2022/results/qf-linearintarith-unsat-core.html
+++ b/archive/2022/results/qf-linearintarith-unsat-core.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LinearIntArith division in the Unsat Core Track. Sequential and parallel winner: Yices2. Ranking: MathSATn, Yices2, z3-4.8.17n, 2021-cvc5-ucn, smtinterpol, cvc5.]

diff --git a/archive/2022/results/qf-linearrealarith-cloud.html b/archive/2022/results/qf-linearrealarith-cloud.html
index 58abbf0b..677422e1 100644
--- a/archive/2022/results/qf-linearrealarith-cloud.html
+++ b/archive/2022/results/qf-linearrealarith-cloud.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LinearRealArith division in the Cloud Track. Participants: SMTS portfolio, SMTS cube-and-conquer, SMTS cube-and-conquer (fixed), cvc5-cloud.]

diff --git a/archive/2022/results/qf-linearrealarith-incremental.html b/archive/2022/results/qf-linearrealarith-incremental.html
index cbd70368..d0283e95 100644
--- a/archive/2022/results/qf-linearrealarith-incremental.html
+++ b/archive/2022/results/qf-linearrealarith-incremental.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LinearRealArith division in the Incremental Track. Parallel-performance winner: OpenSMT. Participants: 2018-MathSAT-incrementaln, MathSATn, OpenSMT, Yices2, cvc5, z3-4.8.17n, smtinterpol, solsmt.]

diff --git a/archive/2022/results/qf-linearrealarith-model-validation.html b/archive/2022/results/qf-linearrealarith-model-validation.html
index bbae3fc7..8ebed831 100644
--- a/archive/2022/results/qf-linearrealarith-model-validation.html
+++ b/archive/2022/results/qf-linearrealarith-model-validation.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LinearRealArith division in the Model Validation Track. Sequential and parallel winner: OpenSMT. Correct results: OpenSMT 606, 2021-Yices2 model-validationn 601, Yices2 601, cvc5 596, z3-4.8.17n 595, MathSATn 579, smtinterpol 579.]

diff --git a/archive/2022/results/qf-linearrealarith-parallel.html b/archive/2022/results/qf-linearrealarith-parallel.html
index 1f9f19c3..9eb31370 100644
--- a/archive/2022/results/qf-linearrealarith-parallel.html
+++ b/archive/2022/results/qf-linearrealarith-parallel.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LinearRealArith division in the Parallel Track. Participants: SMTS portfolio, SMTS cube-and-conquer, SMTS cube-and-conquer (fixed).]

diff --git a/archive/2022/results/qf-linearrealarith-proof-exhibition.html b/archive/2022/results/qf-linearrealarith-proof-exhibition.html
index eb0f2125..b3f94757 100644
--- a/archive/2022/results/qf-linearrealarith-proof-exhibition.html
+++ b/archive/2022/results/qf-linearrealarith-proof-exhibition.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LinearRealArith division in the Proof Exhibition Track. Scores: veriT 411, smtinterpol 392, cvc5-lfsc 358, OpenSMT 344, cvc5 122.]

diff --git a/archive/2022/results/qf-linearrealarith-single-query.html b/archive/2022/results/qf-linearrealarith-single-query.html
index 1a117490..a8deb88b 100644
--- a/archive/2022/results/qf-linearrealarith-single-query.html
+++ b/archive/2022/results/qf-linearrealarith-single-query.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LinearRealArith division in the Single Query Track. Winners: Yices2 (sequential, parallel, SAT, 24s), cvc5 (UNSAT). Solved: 2021-Yices2n 750, Yices2 750, cvc5 745, OpenSMT 738, z3-4.8.17n 726, veriT 705, MathSATn 672, smtinterpol 655, solsmt 237.]

diff --git a/archive/2022/results/qf-linearrealarith-unsat-core.html b/archive/2022/results/qf-linearrealarith-unsat-core.html
index 39cc8c9a..37a3f5cc 100644
--- a/archive/2022/results/qf-linearrealarith-unsat-core.html
+++ b/archive/2022/results/qf-linearrealarith-unsat-core.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LinearRealArith division in the Unsat Core Track. Sequential and parallel winner: Yices2. Ranking: 2020-Yices2n, Yices2, cvc5, z3-4.8.17n, MathSATn, smtinterpol.]

diff --git a/archive/2022/results/qf-lira-model-validation.html b/archive/2022/results/qf-lira-model-validation.html
index d849cdec..7f1c5ef8 100644
--- a/archive/2022/results/qf-lira-model-validation.html
+++ b/archive/2022/results/qf-lira-model-validation.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LIRA logic in the Model Validation Track. Sequential and parallel winner: Yices2; all six entrants (Yices2, 2020-z3n, z3-4.8.17n, MathSATn, cvc5, smtinterpol) solve the single benchmark.]

diff --git a/archive/2022/results/qf-lira-proof-exhibition.html b/archive/2022/results/qf-lira-proof-exhibition.html
index 6119e775..648db857 100644
--- a/archive/2022/results/qf-lira-proof-exhibition.html
+++ b/archive/2022/results/qf-lira-proof-exhibition.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LIRA logic in the Proof Exhibition Track. Scores: smtinterpol 3, cvc5-lfsc 1, veriT 0, cvc5 0.]

diff --git a/archive/2022/results/qf-lira-single-query.html b/archive/2022/results/qf-lira-single-query.html
index 14e4504c..cc28a25a 100644
--- a/archive/2022/results/qf-lira-single-query.html
+++ b/archive/2022/results/qf-lira-single-query.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LIRA logic in the Single Query Track. Winner in all five categories: Yices2. Solved: 2019-Par4n 7, z3-4.8.17n 6, Yices2 6, cvc5 6, veriT 5, MathSATn 5, smtinterpol 4.]

diff --git a/archive/2022/results/qf-lira-unsat-core.html b/archive/2022/results/qf-lira-unsat-core.html
index 044585ea..2ae072ad 100644
--- a/archive/2022/results/qf-lira-unsat-core.html
+++ b/archive/2022/results/qf-lira-unsat-core.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LIRA logic in the Unsat Core Track. No winner is listed; every entrant (Yices2, z3-4.8.17n, 2021-cvc5-ucn, MathSATn, smtinterpol, cvc5) scores 0.]

diff --git a/archive/2022/results/qf-lra-cloud.html b/archive/2022/results/qf-lra-cloud.html
index be7a4320..0bb60a27 100644
--- a/archive/2022/results/qf-lra-cloud.html
+++ b/archive/2022/results/qf-lra-cloud.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LRA logic in the Cloud Track. Participants: SMTS portfolio, SMTS cube-and-conquer, SMTS cube-and-conquer (fixed), cvc5-cloud.]

diff --git a/archive/2022/results/qf-lra-incremental.html b/archive/2022/results/qf-lra-incremental.html
index bac2b958..70a4e777 100644
--- a/archive/2022/results/qf-lra-incremental.html
+++ b/archive/2022/results/qf-lra-incremental.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LRA logic in the Incremental Track. Parallel-performance winner: OpenSMT. Participants: 2018-MathSAT-incrementaln, MathSATn, OpenSMT, Yices2, cvc5, z3-4.8.17n, smtinterpol, solsmt.]

diff --git a/archive/2022/results/qf-lra-model-validation.html b/archive/2022/results/qf-lra-model-validation.html
index b57d2695..1007ad0a 100644
--- a/archive/2022/results/qf-lra-model-validation.html
+++ b/archive/2022/results/qf-lra-model-validation.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LRA logic in the Model Validation Track. Sequential and parallel winner: OpenSMT. Correct results: OpenSMT 503, z3-4.8.17n 493, 2021-Yices2 model-validationn 492, Yices2 492, cvc5 490, smtinterpol 478, MathSATn 474.]

diff --git a/archive/2022/results/qf-lra-parallel.html b/archive/2022/results/qf-lra-parallel.html
index 8c704a55..c1ac28e7 100644
--- a/archive/2022/results/qf-lra-parallel.html
+++ b/archive/2022/results/qf-lra-parallel.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LRA logic in the Parallel Track. Participants: SMTS portfolio, SMTS cube-and-conquer, SMTS cube-and-conquer (fixed).]

diff --git a/archive/2022/results/qf-lra-proof-exhibition.html b/archive/2022/results/qf-lra-proof-exhibition.html
index 512cc7d5..c42b4de3 100644
--- a/archive/2022/results/qf-lra-proof-exhibition.html
+++ b/archive/2022/results/qf-lra-proof-exhibition.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LRA logic in the Proof Exhibition Track. Scores: OpenSMT 344, smtinterpol 313, veriT 302, cvc5-lfsc 294, cvc5 114.]

diff --git a/archive/2022/results/qf-lra-single-query.html b/archive/2022/results/qf-lra-single-query.html
index 14f49e1f..6dd81d2b 100644
--- a/archive/2022/results/qf-lra-single-query.html
+++ b/archive/2022/results/qf-lra-single-query.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LRA logic in the Single Query Track. Winners: OpenSMT (sequential, parallel, UNSAT, 24s), Yices2 (SAT). Solved: OpenSMT 546, 2021-Yices2n 537, Yices2 537, cvc5 536, z3-4.8.17n 520, veriT 497, smtinterpol 476, MathSATn 473, solsmt 160.]

diff --git a/archive/2022/results/qf-lra-unsat-core.html b/archive/2022/results/qf-lra-unsat-core.html
index f9182cfd..864cb583 100644
--- a/archive/2022/results/qf-lra-unsat-core.html
+++ b/archive/2022/results/qf-lra-unsat-core.html
@@ -35,7 +35,7 @@

[hunks elided: markup fixes only; rendered text unchanged. Page: competition results for the QF_LRA logic in the Unsat Core Track. Sequential and parallel winner: Yices2. Ranking: 2020-Yices2n, Yices2, cvc5, z3-4.8.17n, MathSATn, smtinterpol.]

diff --git a/archive/2022/results/qf-nia-incremental.html b/archive/2022/results/qf-nia-incremental.html
index 30c02f14..e1774c3f 100644
--- a/archive/2022/results/qf-nia-incremental.html
+++ b/archive/2022/results/qf-nia-incremental.html
@@ -35,7 +35,7 @@

    [qf-nia-incremental.html: QF_NIA (Incremental Track) results. Winner
    (parallel): smtinterpol. Rows for 2021-MathSAT5n, MathSATn, z3-4.8.17n,
    smtinterpol, cvc5, and Yices2; cell values fused in extraction.]

diff --git a/archive/2022/results/qf-nia-proof-exhibition.html b/archive/2022/results/qf-nia-proof-exhibition.html
index e930141d..45f241c9 100644
--- a/archive/2022/results/qf-nia-proof-exhibition.html
+++ b/archive/2022/results/qf-nia-proof-exhibition.html

    [qf-nia-proof-exhibition.html: QF_NIA (Proof Exhibition Track) results.
    Scores: cvc5-lfsc 2287, cvc5 573. Remaining per-solver timing rows: cell
    values fused in extraction.]

diff --git a/archive/2022/results/qf-nia-single-query.html b/archive/2022/results/qf-nia-single-query.html
index ac0e58d8..02adf8d8 100644
--- a/archive/2022/results/qf-nia-single-query.html
+++ b/archive/2022/results/qf-nia-single-query.html

    [qf-nia-single-query.html: QF_NIA (Single Query Track) results. Winners:
    sequential cvc5, parallel cvc5, SAT cvc5, UNSAT Z3++, 24s Yices2. Correct
    results: Z3++-fixedn 9982, Yices-ismt-fixedn 9759, 2019-Par4n 9348,
    z3-4.8.17n 9246, MathSATn 8321, cvc5 8301, Yices2 7862; with errors: Z3++
    10158 (1 error), Yices-ismt 9817 (26 errors). Remaining per-solver timing
    rows: cell values fused in extraction.]

diff --git a/archive/2022/results/qf-nira-proof-exhibition.html b/archive/2022/results/qf-nira-proof-exhibition.html
index af9ad0f1..ae283b4d 100644
--- a/archive/2022/results/qf-nira-proof-exhibition.html
+++ b/archive/2022/results/qf-nira-proof-exhibition.html

    [qf-nira-proof-exhibition.html: QF_NIRA (Proof Exhibition Track) results.
    Scores: cvc5 1, cvc5-lfsc 1. Remaining per-solver timing rows: cell values
    fused in extraction.]

diff --git a/archive/2022/results/qf-nira-single-query.html b/archive/2022/results/qf-nira-single-query.html
index 3954fc58..106f4b2d 100644
--- a/archive/2022/results/qf-nira-single-query.html
+++ b/archive/2022/results/qf-nira-single-query.html

    [qf-nira-single-query.html: QF_NIRA (Single Query Track) results. Winners:
    sequential cvc5, parallel cvc5, UNSAT cvc5; no SAT or 24s winner. Correct
    results: z3-4.8.17n 1, cvc5 1, MathSATn 1, Yices2 0. Remaining per-solver
    timing rows: cell values fused in extraction.]

diff --git a/archive/2022/results/qf-nonlinearintarith-incremental.html b/archive/2022/results/qf-nonlinearintarith-incremental.html
index 0dccc2d0..6d37fe1d 100644
--- a/archive/2022/results/qf-nonlinearintarith-incremental.html
+++ b/archive/2022/results/qf-nonlinearintarith-incremental.html

    [qf-nonlinearintarith-incremental.html: QF_NonLinearIntArith (Incremental
    Track) division results. Winner (parallel): smtinterpol. Rows for
    2021-MathSAT5n, MathSATn, z3-4.8.17n, smtinterpol, cvc5, and Yices2; cell
    values fused in extraction.]

diff --git a/archive/2022/results/qf-nonlinearintarith-proof-exhibition.html b/archive/2022/results/qf-nonlinearintarith-proof-exhibition.html
index a6e3339c..ee8f7f55 100644
--- a/archive/2022/results/qf-nonlinearintarith-proof-exhibition.html
+++ b/archive/2022/results/qf-nonlinearintarith-proof-exhibition.html

    [qf-nonlinearintarith-proof-exhibition.html: QF_NonLinearIntArith (Proof
    Exhibition Track) division results. Scores: cvc5-lfsc 2288, cvc5 574.
    Remaining per-solver timing rows: cell values fused in extraction.]

diff --git a/archive/2022/results/qf-nonlinearintarith-single-query.html b/archive/2022/results/qf-nonlinearintarith-single-query.html
index 8c4693f6..77e1087e 100644
--- a/archive/2022/results/qf-nonlinearintarith-single-query.html
+++ b/archive/2022/results/qf-nonlinearintarith-single-query.html

    [qf-nonlinearintarith-single-query.html: QF_NonLinearIntArith (Single
    Query Track) division results. Winners: sequential cvc5, parallel cvc5,
    SAT cvc5, UNSAT Z3++, 24s Yices2. Correct results: Z3++-fixedn 9982,
    Yices-ismt-fixedn 9759, 2019-Par4n 9348, z3-4.8.17n 9247, MathSATn 8322,
    cvc5 8302, Yices2 7862; with errors: Z3++ 10158 (1 error), Yices-ismt 9817
    (26 errors). Remaining per-solver timing rows: cell values fused in
    extraction.]

diff --git a/archive/2022/results/qf-nonlinearrealarith-proof-exhibition.html b/archive/2022/results/qf-nonlinearrealarith-proof-exhibition.html
index e2af2d84..1139b53c 100644
--- a/archive/2022/results/qf-nonlinearrealarith-proof-exhibition.html
+++ b/archive/2022/results/qf-nonlinearrealarith-proof-exhibition.html

    [qf-nonlinearrealarith-proof-exhibition.html: QF_NonLinearRealArith (Proof
    Exhibition Track) division results. Scores: cvc5-lfsc 2854, cvc5 1557.
    Remaining per-solver timing rows: cell values fused in extraction.]

diff --git a/archive/2022/results/qf-nonlinearrealarith-single-query.html b/archive/2022/results/qf-nonlinearrealarith-single-query.html
index 2a8b5298..63ffc20e 100644
--- a/archive/2022/results/qf-nonlinearrealarith-single-query.html
+++ b/archive/2022/results/qf-nonlinearrealarith-single-query.html

    [qf-nonlinearrealarith-single-query.html: QF_NonLinearRealArith (Single
    Query Track) division results. Winners: sequential cvc5, parallel cvc5,
    SAT Z3++, UNSAT cvc5, 24s cvc5. Correct results: Z3++-fixedn 2641,
    2019-Par4n 2629, cvc5 2545, NRA-LS 2488, Yices2 2341, z3-4.8.17n 2275,
    SMT-RAT-MCSAT 22.06 2189, veriT+raSAT+Redlog 1879, MathSATn 1544; with
    errors: Z3++ 2634 (6 errors). Remaining per-solver timing rows: cell
    values fused in extraction.]

diff --git a/archive/2022/results/qf-nra-proof-exhibition.html b/archive/2022/results/qf-nra-proof-exhibition.html
index b8e45ef9..5cb86dfe 100644
--- a/archive/2022/results/qf-nra-proof-exhibition.html
+++ b/archive/2022/results/qf-nra-proof-exhibition.html

    [qf-nra-proof-exhibition.html: QF_NRA (Proof Exhibition Track) results.
    Scores: cvc5-lfsc 2854, cvc5 1557. Remaining per-solver timing rows: cell
    values fused in extraction.]

diff --git a/archive/2022/results/qf-nra-single-query.html b/archive/2022/results/qf-nra-single-query.html
index 07d2c84f..b7544e22 100644
--- a/archive/2022/results/qf-nra-single-query.html
+++ b/archive/2022/results/qf-nra-single-query.html

    [qf-nra-single-query.html: QF_NRA (Single Query Track) results. Winners:
    sequential cvc5, parallel cvc5, SAT Z3++, UNSAT cvc5, 24s cvc5. Correct
    results: Z3++-fixedn 2641, 2019-Par4n 2629, cvc5 2545, NRA-LS 2488, Yices2
    2341, z3-4.8.17n 2275, SMT-RAT-MCSAT 22.06 2189, veriT+raSAT+Redlog 1879,
    MathSATn 1544; with errors: Z3++ 2634 (6 errors). Remaining per-solver
    timing rows: cell values fused in extraction.]

diff --git a/archive/2022/results/qf-rdl-cloud.html b/archive/2022/results/qf-rdl-cloud.html
index 70d00c2a..f07fe141 100644
--- a/archive/2022/results/qf-rdl-cloud.html
+++ b/archive/2022/results/qf-rdl-cloud.html

    [qf-rdl-cloud.html: QF_RDL (Cloud Track) results. No solver solved any
    benchmark; SMTS cube-and-conquer, SMTS portfolio, SMTS cube-and-conquer
    (fixed), and cvc5-cloud each finished with an error score of 2. Remaining
    per-solver timing rows: cell values fused in extraction.]

diff --git a/archive/2022/results/qf-rdl-model-validation.html b/archive/2022/results/qf-rdl-model-validation.html
index 79f0e999..c9045033 100644
--- a/archive/2022/results/qf-rdl-model-validation.html
+++ b/archive/2022/results/qf-rdl-model-validation.html

    [qf-rdl-model-validation.html: QF_RDL (Model Validation Track) results.
    Winner (sequential and parallel): Yices2. Correct results: 2021-Yices2
    model-validationn 109, Yices2 109, cvc5 106, MathSATn 105, OpenSMT 103,
    z3-4.8.17n 102, smtinterpol 101. Remaining per-solver timing rows: cell
    values fused in extraction.]

diff --git a/archive/2022/results/qf-rdl-parallel.html b/archive/2022/results/qf-rdl-parallel.html
index a23c5849..897a834f 100644
--- a/archive/2022/results/qf-rdl-parallel.html
+++ b/archive/2022/results/qf-rdl-parallel.html

    [qf-rdl-parallel.html: QF_RDL (Parallel Track) results. No solver solved
    any benchmark; SMTS cube-and-conquer, SMTS cube-and-conquer (fixed), and
    SMTS portfolio each finished with an error score of 2. Remaining
    per-solver timing rows: cell values fused in extraction.]

diff --git a/archive/2022/results/qf-rdl-proof-exhibition.html b/archive/2022/results/qf-rdl-proof-exhibition.html
index 3a85e438..92602c50 100644
--- a/archive/2022/results/qf-rdl-proof-exhibition.html
+++ b/archive/2022/results/qf-rdl-proof-exhibition.html

    [qf-rdl-proof-exhibition.html: QF_RDL (Proof Exhibition Track) results.
    Scores: veriT 109, smtinterpol 79, cvc5-lfsc 64, cvc5 8. Remaining
    per-solver timing rows: cell values fused in extraction.]

diff --git a/archive/2022/results/qf-rdl-single-query.html b/archive/2022/results/qf-rdl-single-query.html
index a532f86d..2ef59139 100644
--- a/archive/2022/results/qf-rdl-single-query.html
+++ b/archive/2022/results/qf-rdl-single-query.html

    [qf-rdl-single-query.html: QF_RDL (Single Query Track) results. Winner in
    all categories (sequential, parallel, SAT, UNSAT, 24s): Yices2. Correct
    results: 2021-Yices2n 213, Yices2 213, cvc5 209, veriT 208, z3-4.8.17n
    206, MathSATn 199, OpenSMT 192, smtinterpol 179, solsmt 77. Remaining
    per-solver timing rows: cell values fused in extraction.]

diff --git a/archive/2022/results/qf-s-proof-exhibition.html b/archive/2022/results/qf-s-proof-exhibition.html
index d69ed155..d95d3c69 100644
--- a/archive/2022/results/qf-s-proof-exhibition.html
+++ b/archive/2022/results/qf-s-proof-exhibition.html

    [qf-s-proof-exhibition.html: QF_S (Proof Exhibition Track) results.
    Scores: cvc5-lfsc 172, cvc5 171. Remaining per-solver timing rows: cell
    values fused in extraction.]

diff --git a/archive/2022/results/qf-s-single-query.html b/archive/2022/results/qf-s-single-query.html
index 44cb1ffa..082c32c5 100644
--- a/archive/2022/results/qf-s-single-query.html
+++ b/archive/2022/results/qf-s-single-query.html

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    QF_S (Single Query Track)

    Competition results for the QF_S - + logic - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    QF_S (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) cvc5cvc5cvc5 - - + + Z3str4 - - + + cvc5 - + @@ -131,7 +131,7 @@

    QF_S (Single Query Track)

    - + cvc5 0 545 @@ -142,7 +142,7 @@

    QF_S (Single Query Track)

    - + Z3str4 0 544 @@ -153,7 +153,7 @@

    QF_S (Single Query Track)

    - + 2020-CVC4n 0 543 @@ -164,7 +164,7 @@

    QF_S (Single Query Track)

    - + z3-4.8.17n 0 540 @@ -175,7 +175,7 @@

    QF_S (Single Query Track)

    - + OSTRICH 0 540 @@ -197,7 +197,7 @@

    QF_S (Single Query Track)

    - + cvc5 0 5451404.9371432.96354537317200 @@ -206,7 +206,7 @@

    QF_S (Single Query Track)

    - + Z3str4 0 5441551.5831551.70954437217211 @@ -215,7 +215,7 @@

    QF_S (Single Query Track)

    - + 2020-CVC4n 0 5436399.5956437.32254337117222 @@ -224,7 +224,7 @@

    QF_S (Single Query Track)

    - + z3-4.8.17n 0 5406152.5576151.68754036817255 @@ -233,7 +233,7 @@

    QF_S (Single Query Track)

    - + OSTRICH 0 54011299.8458552.05654036817255 @@ -253,7 +253,7 @@

    QF_S (Single Query Track)

    - + cvc5 0 3731401.2861429.362373373001720 @@ -262,7 +262,7 @@

    QF_S (Single Query Track)

    - + Z3str4 0 3721548.4091548.474372372011721 @@ -271,7 +271,7 @@

    QF_S (Single Query Track)

    - + 2020-CVC4n 0 3716384.7956421.802371371021722 @@ -280,7 +280,7 @@

    QF_S (Single Query Track)

    - + z3-4.8.17n 0 3686146.1686145.586368368051725 @@ -289,7 +289,7 @@

    QF_S (Single Query Track)

    - + OSTRICH 0 36810888.1678307.472368368051725 @@ -309,7 +309,7 @@

    QF_S (Single Query Track)

    - + Z3str4 0 1723.1743.235172017203731 @@ -318,7 +318,7 @@

    QF_S (Single Query Track)

    - + cvc5 0 1723.6513.601172017203730 @@ -327,7 +327,7 @@

    QF_S (Single Query Track)

    - + z3-4.8.17n 0 1726.3896.1172017203735 @@ -336,7 +336,7 @@

    QF_S (Single Query Track)

    - + 2020-CVC4n 0 17214.79915.52172017203732 @@ -345,7 +345,7 @@

    QF_S (Single Query Track)

    - + OSTRICH 0 172411.677244.584172017203735 @@ -365,7 +365,7 @@

    QF_S (Single Query Track)

    - + cvc5 0 540301.602301.54654036817255 @@ -374,7 +374,7 @@

    QF_S (Single Query Track)

    - + z3-4.8.17n 0 539221.319220.42953936717266 @@ -383,7 +383,7 @@

    QF_S (Single Query Track)

    - + Z3str4 0 539360.232360.34253936717266 @@ -392,7 +392,7 @@

    QF_S (Single Query Track)

    - + OSTRICH 0 5293942.8051862.7395293571721616 @@ -401,7 +401,7 @@

    QF_S (Single Query Track)

    - + 2020-CVC4n 0 5161229.5431229.6025163441722929 @@ -425,7 +425,6 @@

    QF_S (Single Query Track)

diff --git a/archive/2022/results/qf-slia-proof-exhibition.html b/archive/2022/results/qf-slia-proof-exhibition.html
index ff3355e0..4581d66c 100644
--- a/archive/2022/results/qf-slia-proof-exhibition.html
+++ b/archive/2022/results/qf-slia-proof-exhibition.html
[Formatting-only hunks as above. Page: "QF_SLIA (Proof Exhibition Track)" — competition results for the QF_SLIA logic in the Proof Exhibition Track. Solved: cvc5-lfsc 4282, cvc5 3991.]
diff --git a/archive/2022/results/qf-slia-single-query.html b/archive/2022/results/qf-slia-single-query.html
index 62487145..ebe05fa7 100644
--- a/archive/2022/results/qf-slia-single-query.html
+++ b/archive/2022/results/qf-slia-single-query.html
[Formatting-only hunks as above. Page: "QF_SLIA (Single Query Track)" — competition results for the QF_SLIA logic in the Single Query Track. Winner in all five categories (Sequential, Parallel, SAT, UNSAT, 24s): cvc5. Solved: 2020-CVC4n 14802, cvc5 14779, z3-4.8.17n 14245, Z3str4 14218, OSTRICH 8444.]
diff --git a/archive/2022/results/qf-snia-single-query.html b/archive/2022/results/qf-snia-single-query.html
index 75e6d354..a2d9850a 100644
--- a/archive/2022/results/qf-snia-single-query.html
+++ b/archive/2022/results/qf-snia-single-query.html
[Formatting-only hunks as above. Page: "QF_SNIA (Single Query Track)" — competition results for the QF_SNIA logic in the Single Query Track. Winners: cvc5 (Sequential, Parallel, SAT, 24s); no UNSAT winner (no unsat instances solved by any entrant). Solved: cvc5, z3-4.8.17n, and OSTRICH each 70 of 70.]
diff --git a/archive/2022/results/qf-strings-proof-exhibition.html b/archive/2022/results/qf-strings-proof-exhibition.html
index 687cfbef..efa1ad78 100644
--- a/archive/2022/results/qf-strings-proof-exhibition.html
+++ b/archive/2022/results/qf-strings-proof-exhibition.html
[Formatting-only hunks as above. Page: "QF_Strings (Proof Exhibition Track)" — competition results for the QF_Strings division in the Proof Exhibition Track. Solved: cvc5-lfsc 4454, cvc5 4162.]
diff --git a/archive/2022/results/qf-strings-single-query.html b/archive/2022/results/qf-strings-single-query.html
index a73e145b..8fb7d438 100644
--- a/archive/2022/results/qf-strings-single-query.html
+++ b/archive/2022/results/qf-strings-single-query.html
[Formatting-only hunks as above. Page: "QF_Strings (Single Query Track)" — competition results for the QF_Strings division in the Single Query Track. Winner in all five categories: cvc5. Solved: cvc5 15394, 2020-CVC4n 15345, z3-4.8.17n 14855, Z3str4 14762, OSTRICH 9054.]
diff --git a/archive/2022/results/qf-uf-incremental.html b/archive/2022/results/qf-uf-incremental.html
index 8f5eb57d..9250c049 100644
--- a/archive/2022/results/qf-uf-incremental.html
+++ b/archive/2022/results/qf-uf-incremental.html
[Formatting-only hunks as above. Page: "QF_UF (Incremental Track)" — competition results for the QF_UF logic in the Incremental Track. Parallel winner: Yices2. Solved calls: Yices2, z3-4.8.17n, 2021-z3n, cvc5, and smtinterpol each 15814; OpenSMT 15783; MathSATn 716.]
diff --git a/archive/2022/results/qf-uf-model-validation.html b/archive/2022/results/qf-uf-model-validation.html
index 476d73e4..0be6fe91 100644
--- a/archive/2022/results/qf-uf-model-validation.html
+++ b/archive/2022/results/qf-uf-model-validation.html
[Formatting-only hunks as above. Page: "QF_UF (Model Validation Track)" — competition results for the QF_UF logic in the Model Validation Track. Winner (Sequential and Parallel): Yices2. Solved: Yices2, 2021-Yices2 model-validationn, z3-4.8.17n, cvc5, and smtinterpol each 1571; OpenSMT 1551; MathSATn 636.]
diff --git a/archive/2022/results/qf-uf-proof-exhibition.html b/archive/2022/results/qf-uf-proof-exhibition.html
index 960d242e..f5d20783 100644
--- a/archive/2022/results/qf-uf-proof-exhibition.html
+++ b/archive/2022/results/qf-uf-proof-exhibition.html
[Formatting-only hunks as above. Page: "QF_UF (Proof Exhibition Track)" — competition results for the QF_UF logic in the Proof Exhibition Track. Solved: OpenSMT 2170, veriT 2133, cvc5-lfsc 2114, smtinterpol 2105, cvc5 167.]
diff --git a/archive/2022/results/qf-uf-single-query.html b/archive/2022/results/qf-uf-single-query.html
index 7bbe099f..80edf62d 100644
--- a/archive/2022/results/qf-uf-single-query.html
+++ b/archive/2022/results/qf-uf-single-query.html
[Formatting-only hunks as above. Page: "QF_UF (Single Query Track)" — competition results for the QF_UF logic in the Single Query Track. Winner in all five categories: Yices2. Solved: Yices2 3493, veriT 3492, OpenSMT 3492, z3-4.8.17n 3491, 2021-z3n 3491, cvc5 3489, smtinterpol 3409, MathSATn 3401.]
diff --git a/archive/2022/results/qf-uf-unsat-core.html b/archive/2022/results/qf-uf-unsat-core.html
index 88890b86..59b4c3d6 100644
--- a/archive/2022/results/qf-uf-unsat-core.html
+++ b/archive/2022/results/qf-uf-unsat-core.html
[Formatting-only hunks as above. Page: "QF_UF (Unsat Core Track)" — competition results for the QF_UF logic in the Unsat Core Track. Winners: Yices2 (Sequential), smtinterpol (Parallel). Scores: z3-4.8.17n 249324, 2021-z3n 248543, Yices2 247446, MathSATn 246132, smtinterpol 244447, cvc5 149371.]
diff --git a/archive/2022/results/qf-ufbv-incremental.html b/archive/2022/results/qf-ufbv-incremental.html
index be5fbd52..2042b08c 100644
--- a/archive/2022/results/qf-ufbv-incremental.html
+++ b/archive/2022/results/qf-ufbv-incremental.html
[Formatting-only hunks as above. Page: "QF_UFBV (Incremental Track)" — competition results for the QF_UFBV logic in the Incremental Track. Parallel winner: Bitwuzla. Solved calls: Bitwuzla 2766, 2020-Yices2 incrementaln 2760, Yices2 2760, z3-4.8.17n 2742, MathSATn 2697, cvc5 2652.]
diff --git a/archive/2022/results/qf-ufbv-model-validation.html b/archive/2022/results/qf-ufbv-model-validation.html
index 9c6b5592..758e9e2f 100644
--- a/archive/2022/results/qf-ufbv-model-validation.html
+++ b/archive/2022/results/qf-ufbv-model-validation.html
[Formatting-only hunks as above. Page: "QF_UFBV (Model Validation Track)" — competition results for the QF_UFBV logic in the Model Validation Track. Winner (Sequential and Parallel): Bitwuzla. Solved: Bitwuzla, 2021-Yices2 model-validationn, and Yices2 each 373; z3-4.8.17n 363; cvc5 362; MathSATn 359.]
diff --git a/archive/2022/results/qf-ufbv-proof-exhibition.html b/archive/2022/results/qf-ufbv-proof-exhibition.html
index ecfe3e05..bc7976f6 100644
--- a/archive/2022/results/qf-ufbv-proof-exhibition.html
+++ b/archive/2022/results/qf-ufbv-proof-exhibition.html
[Formatting-only hunks as above. Page: "QF_UFBV (Proof Exhibition Track)" — competition results for the QF_UFBV logic in the Proof Exhibition Track. Solved: cvc5-lfsc 150, cvc5 3.]
diff --git a/archive/2022/results/qf-ufbv-single-query.html b/archive/2022/results/qf-ufbv-single-query.html
index 4e682532..1229d4e2 100644
--- a/archive/2022/results/qf-ufbv-single-query.html
+++ b/archive/2022/results/qf-ufbv-single-query.html
[Formatting-only hunks as above. Page: "QF_UFBV (Single Query Track)" — competition results for the QF_UFBV logic in the Single Query Track. Winners: Bitwuzla (Sequential, Parallel, UNSAT, 24s), Yices2 (SAT). Solved: Bitwuzla 293, 2020-Bitwuzlan 286, cvc5 272, z3-4.8.17n 266, Yices2 259, MathSATn 242.]
diff --git a/archive/2022/results/qf-ufbv-unsat-core.html b/archive/2022/results/qf-ufbv-unsat-core.html
index feda33b0..e69f30fb 100644
--- a/archive/2022/results/qf-ufbv-unsat-core.html
+++ b/archive/2022/results/qf-ufbv-unsat-core.html
[Formatting-only hunks as above. Page: "QF_UFBV (Unsat Core Track)" — competition results for the QF_UFBV logic in the Unsat Core Track. Winner (Sequential and Parallel): Bitwuzla. Scores: Bitwuzla 977620, 2021-Bitwuzlan 975164, z3-4.8.17n 708561, Yices2 703118, cvc5 10231, MathSATn 0.]
diff --git a/archive/2022/results/qf-ufdt-proof-exhibition.html b/archive/2022/results/qf-ufdt-proof-exhibition.html
index f47e2fe0..c063550d 100644
--- a/archive/2022/results/qf-ufdt-proof-exhibition.html
+++ b/archive/2022/results/qf-ufdt-proof-exhibition.html
[Formatting-only hunks as above. Page: "QF_UFDT (Proof Exhibition Track)" — competition results for the QF_UFDT logic in the Proof Exhibition Track. Solved: smtinterpol 13, cvc5 0, cvc5-lfsc 0.]
diff --git a/archive/2022/results/qf-ufdt-single-query.html b/archive/2022/results/qf-ufdt-single-query.html
index d813fae5..be7462b6 100644
--- a/archive/2022/results/qf-ufdt-single-query.html
+++ b/archive/2022/results/qf-ufdt-single-query.html
[Formatting-only hunks as above. Page: "QF_UFDT (Single Query Track)" — competition results for the QF_UFDT logic in the Single Query Track. Winners: cvc5 (Sequential, Parallel, SAT, UNSAT); no 24s winner (no instance solved within 24s by any entrant). Solved: cvc5 105, z3-4.8.17n 101, 2021-z3n 101, smtinterpol 10.]
diff --git a/archive/2022/results/qf-ufdt-unsat-core.html b/archive/2022/results/qf-ufdt-unsat-core.html
index e926211a..2ab69ca5 100644
--- a/archive/2022/results/qf-ufdt-unsat-core.html
+++ b/archive/2022/results/qf-ufdt-unsat-core.html
[Formatting-only hunks as above. Page: "QF_UFDT (Unsat Core Track)" — competition results for the QF_UFDT logic in the Unsat Core Track. Winner (Sequential and Parallel): cvc5. Scores: z3-4.8.17n 667626, 2021-z3n 654015, cvc5 113438, smtinterpol 18704.]
diff --git a/archive/2022/results/qf-ufdtlira-proof-exhibition.html b/archive/2022/results/qf-ufdtlira-proof-exhibition.html
index 8592e51c..2d4bcbc6 100644
--- a/archive/2022/results/qf-ufdtlira-proof-exhibition.html
+++ b/archive/2022/results/qf-ufdtlira-proof-exhibition.html
[Formatting-only hunks as above. Page: "QF_UFDTLIRA (Proof Exhibition Track)" — competition results for the QF_UFDTLIRA logic in the Proof Exhibition Track. Solved: cvc5, cvc5-lfsc, and smtinterpol each 66 of 66.]
diff --git a/archive/2022/results/qf-ufdtlira-single-query.html b/archive/2022/results/qf-ufdtlira-single-query.html
index 6982f2a6..6487d374 100644
--- a/archive/2022/results/qf-ufdtlira-single-query.html
+++ b/archive/2022/results/qf-ufdtlira-single-query.html
[Formatting-only hunks as above. Page: "QF_UFDTLIRA (Single Query Track)" — competition results for the QF_UFDTLIRA logic in the Single Query Track. Winner in all five categories: cvc5. Solved: cvc5, z3-4.8.17n, and smtinterpol each 9 (5 sat, 4 unsat); 2021-SMTInterpoln 0.]
diff --git a/archive/2022/results/qf-ufdtlira-unsat-core.html b/archive/2022/results/qf-ufdtlira-unsat-core.html
index 373eb53f..f83dc5fb 100644
--- a/archive/2022/results/qf-ufdtlira-unsat-core.html
+++ b/archive/2022/results/qf-ufdtlira-unsat-core.html
[Formatting-only hunks as above. Page: "QF_UFDTLIRA (Unsat Core Track)" — competition results for the QF_UFDTLIRA logic in the Unsat Core Track. Winner (Sequential and Parallel): cvc5. Scores: cvc5, z3-4.8.17n, and smtinterpol each 162.]
diff --git a/archive/2022/results/qf-uffp-incremental.html b/archive/2022/results/qf-uffp-incremental.html
index df4174f3..27249a19 100644
--- a/archive/2022/results/qf-uffp-incremental.html
+++ b/archive/2022/results/qf-uffp-incremental.html
[Formatting-only hunks as above. Page: "QF_UFFP (Incremental Track)" — competition results for the QF_UFFP logic in the Incremental Track. Parallel winner: Bitwuzla. Solved calls: Bitwuzla, 2021-Bitwuzla - fixedn, cvc5, and MathSATn each 2.]
diff --git a/archive/2022/results/qf-uffp-proof-exhibition.html b/archive/2022/results/qf-uffp-proof-exhibition.html
index f7918284..6c46ae9b 100644
--- a/archive/2022/results/qf-uffp-proof-exhibition.html
+++ b/archive/2022/results/qf-uffp-proof-exhibition.html
[Formatting-only hunks as above. Page: "QF_UFFP (Proof Exhibition Track)" — competition results for the QF_UFFP logic in the Proof Exhibition Track. Solved: cvc5 0 of 2, cvc5-lfsc 0 of 2.]
diff --git a/archive/2022/results/qf-uffp-unsat-core.html b/archive/2022/results/qf-uffp-unsat-core.html
index 89f3bcc4..ce96092e 100644
--- a/archive/2022/results/qf-uffp-unsat-core.html
+++ b/archive/2022/results/qf-uffp-unsat-core.html
[Formatting-only hunks as above. Page: "QF_UFFP (Unsat Core Track)" — competition results for the QF_UFFP logic in the Unsat Core Track. No winner in either category; MathSATn and cvc5 both score 0.]
diff --git a/archive/2022/results/qf-uffpdtnira-proof-exhibition.html b/archive/2022/results/qf-uffpdtnira-proof-exhibition.html
index c9950acf..287cb15c 100644
--- a/archive/2022/results/qf-uffpdtnira-proof-exhibition.html
+++ b/archive/2022/results/qf-uffpdtnira-proof-exhibition.html
[Formatting-only hunks as above. Page: "QF_UFFPDTNIRA (Proof Exhibition Track)" — competition results for the QF_UFFPDTNIRA logic in the Proof Exhibition Track. Solved: cvc5 43, cvc5-lfsc 43 (85 unsolved each).]
diff --git a/archive/2022/results/qf-ufidl-model-validation.html b/archive/2022/results/qf-ufidl-model-validation.html
index fb0fafc9..df65cfd1 100644
--- a/archive/2022/results/qf-ufidl-model-validation.html
+++ b/archive/2022/results/qf-ufidl-model-validation.html
[Formatting-only hunks as above. Page: "QF_UFIDL (Model Validation Track)" — competition results for the QF_UFIDL logic in the Model Validation Track. Winner (Sequential and Parallel): smtinterpol. Solved: 2021-SMTInterpoln 198, smtinterpol 198, z3-4.8.17n 178, cvc5 172, Yices2 143, MathSATn 106.]
diff --git a/archive/2022/results/qf-ufidl-proof-exhibition.html b/archive/2022/results/qf-ufidl-proof-exhibition.html
index cf4686c4..93b72cb0 100644
--- a/archive/2022/results/qf-ufidl-proof-exhibition.html
+++ b/archive/2022/results/qf-ufidl-proof-exhibition.html
[Formatting-only hunks as above. Page: "QF_UFIDL (Proof Exhibition Track)" — competition results for the QF_UFIDL logic in the Proof Exhibition Track. Solved: smtinterpol 213, cvc5-lfsc 88, veriT 37, cvc5 9.]
    - + - diff --git a/archive/2022/results/qf-ufidl-single-query.html b/archive/2022/results/qf-ufidl-single-query.html index 363c3863..2dc9577f 100644 --- a/archive/2022/results/qf-ufidl-single-query.html +++ b/archive/2022/results/qf-ufidl-single-query.html @@ -35,7 +35,7 @@

[hunks garbled beyond recovery: QF_UFIDL (Single Query Track) results page]
diff --git a/archive/2022/results/qf-ufidl-unsat-core.html b/archive/2022/results/qf-ufidl-unsat-core.html
index f792f6f9..39c8e5bc 100644
--- a/archive/2022/results/qf-ufidl-unsat-core.html
+++ b/archive/2022/results/qf-ufidl-unsat-core.html

[hunks garbled beyond recovery: QF_UFIDL (Unsat Core Track) results page]
diff --git a/archive/2022/results/qf-uflia-incremental.html b/archive/2022/results/qf-uflia-incremental.html
index ec0cd980..1984ffaf 100644
--- a/archive/2022/results/qf-uflia-incremental.html
+++ b/archive/2022/results/qf-uflia-incremental.html

[hunks garbled beyond recovery: QF_UFLIA (Incremental Track) results page]
diff --git a/archive/2022/results/qf-uflia-model-validation.html b/archive/2022/results/qf-uflia-model-validation.html
index b3682fb8..466ca352 100644
--- a/archive/2022/results/qf-uflia-model-validation.html
+++ b/archive/2022/results/qf-uflia-model-validation.html

[hunks garbled beyond recovery: QF_UFLIA (Model Validation Track) results page]
diff --git a/archive/2022/results/qf-uflia-proof-exhibition.html b/archive/2022/results/qf-uflia-proof-exhibition.html
index 9f9b658e..a76b5e6a 100644
--- a/archive/2022/results/qf-uflia-proof-exhibition.html
+++ b/archive/2022/results/qf-uflia-proof-exhibition.html

[hunks garbled beyond recovery: QF_UFLIA (Proof Exhibition Track) results page]
diff --git a/archive/2022/results/qf-uflia-single-query.html b/archive/2022/results/qf-uflia-single-query.html
index f657370c..79cbb701 100644
--- a/archive/2022/results/qf-uflia-single-query.html
+++ b/archive/2022/results/qf-uflia-single-query.html

[hunks garbled beyond recovery: QF_UFLIA (Single Query Track) results page]
diff --git a/archive/2022/results/qf-uflia-unsat-core.html b/archive/2022/results/qf-uflia-unsat-core.html
index 2b177ff6..b4831460 100644
--- a/archive/2022/results/qf-uflia-unsat-core.html
+++ b/archive/2022/results/qf-uflia-unsat-core.html

[hunks garbled beyond recovery: QF_UFLIA (Unsat Core Track) results page]
diff --git a/archive/2022/results/qf-uflra-cloud.html b/archive/2022/results/qf-uflra-cloud.html
index fb210837..b1802305 100644
--- a/archive/2022/results/qf-uflra-cloud.html
+++ b/archive/2022/results/qf-uflra-cloud.html

[hunks garbled beyond recovery: QF_UFLRA (Cloud Track) results page]
diff --git a/archive/2022/results/qf-uflra-incremental.html b/archive/2022/results/qf-uflra-incremental.html
index 55c31d53..0bb0a795 100644
--- a/archive/2022/results/qf-uflra-incremental.html
+++ b/archive/2022/results/qf-uflra-incremental.html

[hunks garbled beyond recovery: QF_UFLRA (Incremental Track) results page]
diff --git a/archive/2022/results/qf-uflra-model-validation.html b/archive/2022/results/qf-uflra-model-validation.html
index 36c1a7dd..18fb8815 100644
--- a/archive/2022/results/qf-uflra-model-validation.html
+++ b/archive/2022/results/qf-uflra-model-validation.html

[hunks garbled beyond recovery: QF_UFLRA (Model Validation Track) results page]
diff --git a/archive/2022/results/qf-uflra-parallel.html b/archive/2022/results/qf-uflra-parallel.html
index 3d36dc46..9870c7fb 100644
--- a/archive/2022/results/qf-uflra-parallel.html
+++ b/archive/2022/results/qf-uflra-parallel.html

[hunks garbled beyond recovery: QF_UFLRA (Parallel Track) results page]
diff --git a/archive/2022/results/qf-uflra-proof-exhibition.html b/archive/2022/results/qf-uflra-proof-exhibition.html
index 53f28995..c2d8de86 100644
--- a/archive/2022/results/qf-uflra-proof-exhibition.html
+++ b/archive/2022/results/qf-uflra-proof-exhibition.html

[hunks garbled beyond recovery: QF_UFLRA (Proof Exhibition Track) results page]
diff --git a/archive/2022/results/qf-uflra-single-query.html b/archive/2022/results/qf-uflra-single-query.html
index 411a1707..6f517dfc 100644
--- a/archive/2022/results/qf-uflra-single-query.html
+++ b/archive/2022/results/qf-uflra-single-query.html

[hunks garbled beyond recovery: QF_UFLRA (Single Query Track) results page]
diff --git a/archive/2022/results/qf-uflra-unsat-core.html b/archive/2022/results/qf-uflra-unsat-core.html
index c0ce7df1..69b6d6a8 100644
--- a/archive/2022/results/qf-uflra-unsat-core.html
+++ b/archive/2022/results/qf-uflra-unsat-core.html

[hunks garbled beyond recovery: QF_UFLRA (Unsat Core Track) results page]
diff --git a/archive/2022/results/qf-ufnia-incremental.html b/archive/2022/results/qf-ufnia-incremental.html
index 724f1385..9705ff9f 100644
--- a/archive/2022/results/qf-ufnia-incremental.html
+++ b/archive/2022/results/qf-ufnia-incremental.html

[hunks garbled beyond recovery: QF_UFNIA (Incremental Track) results page]
diff --git a/archive/2022/results/qf-ufnia-proof-exhibition.html b/archive/2022/results/qf-ufnia-proof-exhibition.html
index d3296e3c..94126979 100644
--- a/archive/2022/results/qf-ufnia-proof-exhibition.html
+++ b/archive/2022/results/qf-ufnia-proof-exhibition.html

[hunks garbled beyond recovery: QF_UFNIA (Proof Exhibition Track) results page]
diff --git a/archive/2022/results/qf-ufnia-single-query.html b/archive/2022/results/qf-ufnia-single-query.html
index ca25fb59..9c96f784 100644
--- a/archive/2022/results/qf-ufnia-single-query.html
+++ b/archive/2022/results/qf-ufnia-single-query.html

[hunks garbled beyond recovery: QF_UFNIA (Single Query Track) results page]
diff --git a/archive/2022/results/qf-ufnia-unsat-core.html b/archive/2022/results/qf-ufnia-unsat-core.html
index e49ede76..2e31b33b 100644
--- a/archive/2022/results/qf-ufnia-unsat-core.html
+++ b/archive/2022/results/qf-ufnia-unsat-core.html

[hunks garbled beyond recovery: QF_UFNIA (Unsat Core Track) results page]
diff --git a/archive/2022/results/qf-ufnra-incremental.html b/archive/2022/results/qf-ufnra-incremental.html
index 258437e3..2aafb038 100644
--- a/archive/2022/results/qf-ufnra-incremental.html
+++ b/archive/2022/results/qf-ufnra-incremental.html

[hunks garbled beyond recovery: QF_UFNRA (Incremental Track) results page]
diff --git a/archive/2022/results/qf-ufnra-proof-exhibition.html b/archive/2022/results/qf-ufnra-proof-exhibition.html
index 7f333435..285e2ba0 100644
--- a/archive/2022/results/qf-ufnra-proof-exhibition.html
+++ b/archive/2022/results/qf-ufnra-proof-exhibition.html

[hunks garbled beyond recovery: QF_UFNRA (Proof Exhibition Track) results page]
diff --git a/archive/2022/results/qf-ufnra-single-query.html b/archive/2022/results/qf-ufnra-single-query.html
index b1fa0747..bf69abf9 100644
--- a/archive/2022/results/qf-ufnra-single-query.html
+++ b/archive/2022/results/qf-ufnra-single-query.html

[hunks garbled beyond recovery: QF_UFNRA (Single Query Track) results page]
diff --git a/archive/2022/results/qf-ufnra-unsat-core.html b/archive/2022/results/qf-ufnra-unsat-core.html
index 8684b9ab..938f14e3 100644
--- a/archive/2022/results/qf-ufnra-unsat-core.html
+++ b/archive/2022/results/qf-ufnra-unsat-core.html

[hunks garbled beyond recovery: QF_UFNRA (Unsat Core Track) results page]
diff --git a/archive/2022/results/results-cloud.html b/archive/2022/results/results-cloud.html
index 809357bb..abc1eb1b 100644
--- a/archive/2022/results/results-cloud.html
+++ b/archive/2022/results/results-cloud.html

[hunks garbled beyond recovery: SMT-COMP 2022 Results - Cloud Track (Summary) page]
diff --git a/archive/2022/results/results-incremental.html b/archive/2022/results/results-incremental.html
index bf97040a..25dee1eb 100644
--- a/archive/2022/results/results-incremental.html
+++ b/archive/2022/results/results-incremental.html

[hunks garbled beyond recovery: SMT-COMP 2022 Results - Incremental Track (Summary) page]
diff --git a/archive/2022/results/results-model-validation.html b/archive/2022/results/results-model-validation.html
index 329d1e7d..e5fae248 100644
--- a/archive/2022/results/results-model-validation.html
+++ b/archive/2022/results/results-model-validation.html

[hunks garbled beyond recovery: SMT-COMP 2022 Results - Model Validation Track (Summary) page]
diff --git a/archive/2022/results/results-parallel.html b/archive/2022/results/results-parallel.html
index bc7c8c87..3c451d0a 100644
--- a/archive/2022/results/results-parallel.html
+++ b/archive/2022/results/results-parallel.html

[hunks garbled beyond recovery: SMT-COMP 2022 Results - Parallel Track (Summary) page]
diff --git a/archive/2022/results/results-single-query.html b/archive/2022/results/results-single-query.html
index 5c900dc5..b1c2622c 100644
--- a/archive/2022/results/results-single-query.html
+++ b/archive/2022/results/results-single-query.html

[hunks garbled beyond recovery: SMT-COMP 2022 Results - Single Query Track (Summary) page]
diff --git a/archive/2022/results/results-unsat-core.html b/archive/2022/results/results-unsat-core.html
index 46df909e..46883c76 100644
--- a/archive/2022/results/results-unsat-core.html
+++ b/archive/2022/results/results-unsat-core.html

[hunks garbled beyond recovery: SMT-COMP 2022 Results - Unsat Core Track (Summary) page]
diff --git a/archive/2022/results/uf-cloud.html b/archive/2022/results/uf-cloud.html
index 490b554a..005a3f7c 100644
--- a/archive/2022/results/uf-cloud.html
+++ b/archive/2022/results/uf-cloud.html

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2022 Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    UF (Cloud Track)

    Competition results for the UF - + logic - + in the Cloud Track.

    @@ -117,7 +117,7 @@

    UF (Cloud Track)

    - + Vampire 0 1518948.79115411260 @@ -126,7 +126,7 @@

    UF (Cloud Track)

    - + cvc5-cloud 5 246914.701202390 @@ -146,7 +146,7 @@

    UF (Cloud Track)

    - + Vampire 0 4708.8184400370 @@ -155,7 +155,7 @@

    UF (Cloud Track)

    - + cvc5-cloud 0 04800.00004370 @@ -175,7 +175,7 @@

    UF (Cloud Track)

    - + Vampire 0 112639.973110111290 @@ -184,7 +184,7 @@

    UF (Cloud Track)

    - + cvc5-cloud 5 212114.70120210290 @@ -204,7 +204,7 @@

    UF (Cloud Track)

    - + Vampire 0 5669.7315143624 @@ -213,7 +213,7 @@

    UF (Cloud Track)

    - + cvc5-cloud 0 1978.4641014040 @@ -237,7 +237,6 @@

    UF (Cloud Track)

    diff --git a/archive/2022/results/uf-incremental.html b/archive/2022/results/uf-incremental.html
    index cf62ff7c..926c99c7 100644
    --- a/archive/2022/results/uf-incremental.html
    +++ b/archive/2022/results/uf-incremental.html
    [garbled hunks: site banner, navigation menu, and the "UF (Incremental Track)" results tables]
    diff --git a/archive/2022/results/uf-parallel.html b/archive/2022/results/uf-parallel.html
    index 0a11a6a2..18586ac4 100644
    --- a/archive/2022/results/uf-parallel.html
    +++ b/archive/2022/results/uf-parallel.html
    [garbled hunks: site banner, navigation menu, and the "UF (Parallel Track)" results tables]
    diff --git a/archive/2022/results/uf-proof-exhibition.html b/archive/2022/results/uf-proof-exhibition.html
    index 83486d26..43030e88 100644
    --- a/archive/2022/results/uf-proof-exhibition.html
    +++ b/archive/2022/results/uf-proof-exhibition.html
    [garbled hunks: site banner, navigation menu, and the "UF (Proof Exhibition Track)" results tables]
    diff --git a/archive/2022/results/uf-single-query.html b/archive/2022/results/uf-single-query.html
    index dee5df40..5e3e776e 100644
    --- a/archive/2022/results/uf-single-query.html
    +++ b/archive/2022/results/uf-single-query.html
    [garbled hunks: site banner, navigation menu, and the "UF (Single Query Track)" results tables]
    diff --git a/archive/2022/results/uf-unsat-core.html b/archive/2022/results/uf-unsat-core.html
    index a78af127..17b117ec 100644
    --- a/archive/2022/results/uf-unsat-core.html
    +++ b/archive/2022/results/uf-unsat-core.html
    [garbled hunks: site banner, navigation menu, and the "UF (Unsat Core Track)" results tables]
    diff --git a/archive/2022/results/ufbv-proof-exhibition.html b/archive/2022/results/ufbv-proof-exhibition.html
    index cf6ca7aa..66785c73 100644
    --- a/archive/2022/results/ufbv-proof-exhibition.html
    +++ b/archive/2022/results/ufbv-proof-exhibition.html
    [garbled hunks: site banner, navigation menu, and the "UFBV (Proof Exhibition Track)" results tables]
    diff --git a/archive/2022/results/ufbv-single-query.html b/archive/2022/results/ufbv-single-query.html
    index 30ded2f9..7ac63985 100644
    --- a/archive/2022/results/ufbv-single-query.html
    +++ b/archive/2022/results/ufbv-single-query.html
    [garbled hunks: site banner, navigation menu, and the "UFBV (Single Query Track)" results tables]
    diff --git a/archive/2022/results/ufbvfp-proof-exhibition.html b/archive/2022/results/ufbvfp-proof-exhibition.html
    index 5a9859ba..f989fa7b 100644
    --- a/archive/2022/results/ufbvfp-proof-exhibition.html
    +++ b/archive/2022/results/ufbvfp-proof-exhibition.html
    [garbled hunks: site banner, navigation menu, and the "UFBVFP (Proof Exhibition Track)" results tables]
    diff --git a/archive/2022/results/ufbvfp-single-query.html b/archive/2022/results/ufbvfp-single-query.html
    index b7cb0ada..1ad045f5 100644
    --- a/archive/2022/results/ufbvfp-single-query.html
    +++ b/archive/2022/results/ufbvfp-single-query.html
    [garbled hunks: site banner, navigation menu, and the "UFBVFP (Single Query Track)" results tables]
    diff --git a/archive/2022/results/ufbvlia-proof-exhibition.html b/archive/2022/results/ufbvlia-proof-exhibition.html
    index 6e3745c5..02773990 100644
    --- a/archive/2022/results/ufbvlia-proof-exhibition.html
    +++ b/archive/2022/results/ufbvlia-proof-exhibition.html
    [garbled hunks: site banner, navigation menu, and the "UFBVLIA (Proof Exhibition Track)" results tables]
    diff --git a/archive/2022/results/ufbvlia-single-query.html b/archive/2022/results/ufbvlia-single-query.html
    index c0bf32ea..9ccdd4b5 100644
    --- a/archive/2022/results/ufbvlia-single-query.html
    +++ b/archive/2022/results/ufbvlia-single-query.html
    [garbled hunks: site banner, navigation menu, and the "UFBVLIA (Single Query Track)" results tables]
    diff --git a/archive/2022/results/ufdt-cloud.html b/archive/2022/results/ufdt-cloud.html
    index e0bc7dac..6f851fde 100644
    --- a/archive/2022/results/ufdt-cloud.html
    +++ b/archive/2022/results/ufdt-cloud.html
    [garbled hunks: site banner, navigation menu, and the "UFDT (Cloud Track)" results tables]
    diff --git a/archive/2022/results/ufdt-parallel.html b/archive/2022/results/ufdt-parallel.html
    index f7b4bc59..8f021ae3 100644
    --- a/archive/2022/results/ufdt-parallel.html
    +++ b/archive/2022/results/ufdt-parallel.html
    [garbled hunks: site banner, navigation menu, and the "UFDT (Parallel Track)" results tables]
    diff --git a/archive/2022/results/ufdt-proof-exhibition.html b/archive/2022/results/ufdt-proof-exhibition.html
    index cf0074cd..6fad384d 100644
    --- a/archive/2022/results/ufdt-proof-exhibition.html
    +++ b/archive/2022/results/ufdt-proof-exhibition.html
    [garbled hunks: site banner, navigation menu, and the "UFDT (Proof Exhibition Track)" results tables]
    diff --git a/archive/2022/results/ufdt-single-query.html b/archive/2022/results/ufdt-single-query.html
    index e681991d..cafe56de 100644
    --- a/archive/2022/results/ufdt-single-query.html
    +++ b/archive/2022/results/ufdt-single-query.html
    [garbled hunks: site banner, navigation menu, and the "UFDT (Single Query Track)" results tables]
    diff --git a/archive/2022/results/ufdt-unsat-core.html b/archive/2022/results/ufdt-unsat-core.html
    index 45447394..a22b3666 100644
    --- a/archive/2022/results/ufdt-unsat-core.html
    +++ b/archive/2022/results/ufdt-unsat-core.html
    [garbled hunks: site banner, navigation menu, and the "UFDT (Unsat Core Track)" results tables]
    diff --git a/archive/2022/results/ufdtlia-cloud.html b/archive/2022/results/ufdtlia-cloud.html
    index d017b610..6c8647c2 100644
    --- a/archive/2022/results/ufdtlia-cloud.html
    +++ b/archive/2022/results/ufdtlia-cloud.html
    [garbled hunks: site banner, navigation menu, and the "UFDTLIA (Cloud Track)" results tables]
    diff --git a/archive/2022/results/ufdtlia-parallel.html b/archive/2022/results/ufdtlia-parallel.html
    index a15ad9d6..17343a20 100644
    --- a/archive/2022/results/ufdtlia-parallel.html
    +++ b/archive/2022/results/ufdtlia-parallel.html
    [garbled hunks: site banner, navigation menu, and the "UFDTLIA (Parallel Track)" results tables]
    diff --git a/archive/2022/results/ufdtlia-proof-exhibition.html b/archive/2022/results/ufdtlia-proof-exhibition.html
    index 228c2f65..8d3f314b 100644
    --- a/archive/2022/results/ufdtlia-proof-exhibition.html
    +++ b/archive/2022/results/ufdtlia-proof-exhibition.html
    [garbled hunks: site banner, navigation menu, and the "UFDTLIA (Proof Exhibition Track)" results tables]
    diff --git a/archive/2022/results/ufdtlia-single-query.html b/archive/2022/results/ufdtlia-single-query.html
    index 82656266..aa88dac6 100644
    --- a/archive/2022/results/ufdtlia-single-query.html
    +++ b/archive/2022/results/ufdtlia-single-query.html
    [garbled hunks: site banner, navigation menu, and the "UFDTLIA (Single Query Track)" results tables]
    diff --git a/archive/2022/results/ufdtlia-unsat-core.html b/archive/2022/results/ufdtlia-unsat-core.html
    index d600a126..cff9c25f 100644
    --- a/archive/2022/results/ufdtlia-unsat-core.html
    +++ b/archive/2022/results/ufdtlia-unsat-core.html
    [garbled hunks: site banner, navigation menu, and the "UFDTLIA (Unsat Core Track)" results tables]
    diff --git a/archive/2022/results/ufdtlira-cloud.html b/archive/2022/results/ufdtlira-cloud.html
    index 923528cf..8828c02c 100644
    --- a/archive/2022/results/ufdtlira-cloud.html
    +++ b/archive/2022/results/ufdtlira-cloud.html
    [garbled hunks: site banner, navigation menu, and the "UFDTLIRA (Cloud Track)" results tables]
    diff --git a/archive/2022/results/ufdtlira-parallel.html b/archive/2022/results/ufdtlira-parallel.html
    index 7e078240..6062772e 100644
    --- a/archive/2022/results/ufdtlira-parallel.html
    +++ b/archive/2022/results/ufdtlira-parallel.html
    [garbled hunks: site banner, navigation menu, and the "UFDTLIRA (Parallel Track)" results tables]
    diff --git a/archive/2022/results/ufdtlira-proof-exhibition.html b/archive/2022/results/ufdtlira-proof-exhibition.html
    index 6b1d972a..7499f8c8 100644
    --- a/archive/2022/results/ufdtlira-proof-exhibition.html
    +++ b/archive/2022/results/ufdtlira-proof-exhibition.html
    [garbled hunks: site banner, navigation menu, and the "UFDTLIRA (Proof Exhibition Track)" results tables]
    diff --git a/archive/2022/results/ufdtlira-single-query.html b/archive/2022/results/ufdtlira-single-query.html
    index e8e00696..e0b524bb 100644
    --- a/archive/2022/results/ufdtlira-single-query.html
    +++ b/archive/2022/results/ufdtlira-single-query.html
    [garbled hunks: site banner, navigation menu, and the "UFDTLIRA (Single Query Track)" results tables]
    diff --git a/archive/2022/results/ufdtlira-unsat-core.html b/archive/2022/results/ufdtlira-unsat-core.html
    index 82c9f0a2..ada1d1a9 100644
    --- a/archive/2022/results/ufdtlira-unsat-core.html
    +++ b/archive/2022/results/ufdtlira-unsat-core.html
    [garbled hunks: site banner, navigation menu, and the "UFDTLIRA (Unsat Core Track)" results tables]
    diff --git a/archive/2022/results/ufdtnia-incremental.html b/archive/2022/results/ufdtnia-incremental.html
    index 44eafff4..898e8864 100644
    --- a/archive/2022/results/ufdtnia-incremental.html
    +++ b/archive/2022/results/ufdtnia-incremental.html
    [garbled hunks: site banner, navigation menu, and the "UFDTNIA (Incremental Track)" results tables]
    diff --git a/archive/2022/results/ufdtnia-proof-exhibition.html b/archive/2022/results/ufdtnia-proof-exhibition.html
    index 7994bf5a..2b75d5a6 100644
    --- a/archive/2022/results/ufdtnia-proof-exhibition.html
    +++ b/archive/2022/results/ufdtnia-proof-exhibition.html
    [garbled hunks: site banner, navigation menu, and the "UFDTNIA (Proof Exhibition Track)" results tables]
    - + - diff --git a/archive/2022/results/ufdtnia-single-query.html b/archive/2022/results/ufdtnia-single-query.html index 1cd19116..49b66c9c 100644 --- a/archive/2022/results/ufdtnia-single-query.html +++ b/archive/2022/results/ufdtnia-single-query.html @@ -35,7 +35,7 @@

[hunks garbled in extraction — UFDTNIA (Single Query Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/ufdtnia-unsat-core.html b/archive/2022/results/ufdtnia-unsat-core.html
index d7841a67..9c3ccda6 100644
--- a/archive/2022/results/ufdtnia-unsat-core.html
+++ b/archive/2022/results/ufdtnia-unsat-core.html

[hunks garbled in extraction — UFDTNIA (Unsat Core Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/ufdtnira-cloud.html b/archive/2022/results/ufdtnira-cloud.html
index 9edc5e9e..37524a9e 100644
--- a/archive/2022/results/ufdtnira-cloud.html
+++ b/archive/2022/results/ufdtnira-cloud.html

[hunks garbled in extraction — UFDTNIRA (Cloud Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/ufdtnira-parallel.html b/archive/2022/results/ufdtnira-parallel.html
index 58dc31c4..7a5e8333 100644
--- a/archive/2022/results/ufdtnira-parallel.html
+++ b/archive/2022/results/ufdtnira-parallel.html

[hunks garbled in extraction — UFDTNIRA (Parallel Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/ufdtnira-proof-exhibition.html b/archive/2022/results/ufdtnira-proof-exhibition.html
index 2b29b431..078866c4 100644
--- a/archive/2022/results/ufdtnira-proof-exhibition.html
+++ b/archive/2022/results/ufdtnira-proof-exhibition.html

[hunks garbled in extraction — UFDTNIRA (Proof Exhibition Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/ufdtnira-single-query.html b/archive/2022/results/ufdtnira-single-query.html
index d7b96469..596d3428 100644
--- a/archive/2022/results/ufdtnira-single-query.html
+++ b/archive/2022/results/ufdtnira-single-query.html

[hunks garbled in extraction — UFDTNIRA (Single Query Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/ufdtnira-unsat-core.html b/archive/2022/results/ufdtnira-unsat-core.html
index 94f76da5..097db71c 100644
--- a/archive/2022/results/ufdtnira-unsat-core.html
+++ b/archive/2022/results/ufdtnira-unsat-core.html

[hunks garbled in extraction — UFDTNIRA (Unsat Core Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/uffpdtnira-proof-exhibition.html b/archive/2022/results/uffpdtnira-proof-exhibition.html
index a0d404f2..563c8957 100644
--- a/archive/2022/results/uffpdtnira-proof-exhibition.html
+++ b/archive/2022/results/uffpdtnira-proof-exhibition.html

[hunks garbled in extraction — UFFPDTNIRA (Proof Exhibition Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/uffpdtnira-single-query.html b/archive/2022/results/uffpdtnira-single-query.html
index 39ba6262..e5ba266a 100644
--- a/archive/2022/results/uffpdtnira-single-query.html
+++ b/archive/2022/results/uffpdtnira-single-query.html

[hunks garbled in extraction — UFFPDTNIRA (Single Query Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/uffpdtnira-unsat-core.html b/archive/2022/results/uffpdtnira-unsat-core.html
index 2807cd38..9f2b1de2 100644
--- a/archive/2022/results/uffpdtnira-unsat-core.html
+++ b/archive/2022/results/uffpdtnira-unsat-core.html

[hunks garbled in extraction — UFFPDTNIRA (Unsat Core Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/ufidl-proof-exhibition.html b/archive/2022/results/ufidl-proof-exhibition.html
index b36a97e2..cc713dc5 100644
--- a/archive/2022/results/ufidl-proof-exhibition.html
+++ b/archive/2022/results/ufidl-proof-exhibition.html

[hunks garbled in extraction — UFIDL (Proof Exhibition Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/ufidl-single-query.html b/archive/2022/results/ufidl-single-query.html
index 8fbd7d1d..b5d8989e 100644
--- a/archive/2022/results/ufidl-single-query.html
+++ b/archive/2022/results/ufidl-single-query.html

[hunks garbled in extraction — UFIDL (Single Query Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/ufidl-unsat-core.html b/archive/2022/results/ufidl-unsat-core.html
index 21d00b5a..2798bcba 100644
--- a/archive/2022/results/ufidl-unsat-core.html
+++ b/archive/2022/results/ufidl-unsat-core.html

[hunks garbled in extraction — UFIDL (Unsat Core Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/uflia-cloud.html b/archive/2022/results/uflia-cloud.html
index a56fe3b2..51ecf4b4 100644
--- a/archive/2022/results/uflia-cloud.html
+++ b/archive/2022/results/uflia-cloud.html

[hunks garbled in extraction — UFLIA (Cloud Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/uflia-parallel.html b/archive/2022/results/uflia-parallel.html
index 49e6d60e..b30d87c0 100644
--- a/archive/2022/results/uflia-parallel.html
+++ b/archive/2022/results/uflia-parallel.html

[hunks garbled in extraction — UFLIA (Parallel Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/uflia-proof-exhibition.html b/archive/2022/results/uflia-proof-exhibition.html
index ece32524..98c92b68 100644
--- a/archive/2022/results/uflia-proof-exhibition.html
+++ b/archive/2022/results/uflia-proof-exhibition.html

[hunks garbled in extraction — UFLIA (Proof Exhibition Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/uflia-single-query.html b/archive/2022/results/uflia-single-query.html
index 59231061..34e4f1aa 100644
--- a/archive/2022/results/uflia-single-query.html
+++ b/archive/2022/results/uflia-single-query.html

[hunks garbled in extraction — UFLIA (Single Query Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/uflia-unsat-core.html b/archive/2022/results/uflia-unsat-core.html
index 4e3edea1..7f5ff543 100644
--- a/archive/2022/results/uflia-unsat-core.html
+++ b/archive/2022/results/uflia-unsat-core.html

[hunks garbled in extraction — UFLIA (Unsat Core Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/uflra-incremental.html b/archive/2022/results/uflra-incremental.html
index 6738d4e9..ac67012a 100644
--- a/archive/2022/results/uflra-incremental.html
+++ b/archive/2022/results/uflra-incremental.html

[hunks garbled in extraction — UFLRA (Incremental Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/uflra-proof-exhibition.html b/archive/2022/results/uflra-proof-exhibition.html
index c0584a07..14ff7013 100644
--- a/archive/2022/results/uflra-proof-exhibition.html
+++ b/archive/2022/results/uflra-proof-exhibition.html

[hunks garbled in extraction — UFLRA (Proof Exhibition Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/uflra-single-query.html b/archive/2022/results/uflra-single-query.html
index 4201a688..dbe78410 100644
--- a/archive/2022/results/uflra-single-query.html
+++ b/archive/2022/results/uflra-single-query.html

[hunks garbled in extraction — UFLRA (Single Query Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/uflra-unsat-core.html b/archive/2022/results/uflra-unsat-core.html
index 021b11ef..21a7ce8e 100644
--- a/archive/2022/results/uflra-unsat-core.html
+++ b/archive/2022/results/uflra-unsat-core.html

[hunks garbled in extraction — UFLRA (Unsat Core Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/ufnia-cloud.html b/archive/2022/results/ufnia-cloud.html
index 51277005..2beaefd8 100644
--- a/archive/2022/results/ufnia-cloud.html
+++ b/archive/2022/results/ufnia-cloud.html

[hunks garbled in extraction — UFNIA (Cloud Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/ufnia-incremental.html b/archive/2022/results/ufnia-incremental.html
index 249e723b..673ed431 100644
--- a/archive/2022/results/ufnia-incremental.html
+++ b/archive/2022/results/ufnia-incremental.html

[hunks garbled in extraction — UFNIA (Incremental Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/ufnia-parallel.html b/archive/2022/results/ufnia-parallel.html
index 44dbe32d..5fe84125 100644
--- a/archive/2022/results/ufnia-parallel.html
+++ b/archive/2022/results/ufnia-parallel.html

[hunks garbled in extraction — UFNIA (Parallel Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/ufnia-proof-exhibition.html b/archive/2022/results/ufnia-proof-exhibition.html
index 1773403b..0c99f55b 100644
--- a/archive/2022/results/ufnia-proof-exhibition.html
+++ b/archive/2022/results/ufnia-proof-exhibition.html

[hunks garbled in extraction — UFNIA (Proof Exhibition Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/ufnia-single-query.html b/archive/2022/results/ufnia-single-query.html
index 68fe07d4..16fc4852 100644
--- a/archive/2022/results/ufnia-single-query.html
+++ b/archive/2022/results/ufnia-single-query.html

[hunks garbled in extraction — UFNIA (Single Query Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/ufnia-unsat-core.html b/archive/2022/results/ufnia-unsat-core.html
index b4a118f7..e23cd7f9 100644
--- a/archive/2022/results/ufnia-unsat-core.html
+++ b/archive/2022/results/ufnia-unsat-core.html

[hunks garbled in extraction — UFNIA (Unsat Core Track) results page: apparently markup-only edits to the page header, navigation, results tables, and footer; no visible text changes]

diff --git a/archive/2022/results/ufnra-incremental.html b/archive/2022/results/ufnra-incremental.html
index 47c15a2d..f7d673fd 100644
--- a/archive/2022/results/ufnra-incremental.html
+++ b/archive/2022/results/ufnra-incremental.html

    UFNRA (Incremental Track)

    Competition results for the UFNRA logic in the Incremental Track.

    Winner (parallel performance): cvc5

    Rows as recovered (solver, solved count; every error score is 0; fused timing columns omitted):
    z3-4.8.17n 209, 2020-z3n 209, cvc5 5, smtinterpol 2, UltimateEliminator+MathSAT 0

diff --git a/archive/2022/slides.html b/archive/2022/slides.html
index 0a91f974..910ed92e 100644
--- a/archive/2022/slides.html
+++ b/archive/2022/slides.html
@@ -34,7 +34,7 @@

    SMT workshop presentation

diff --git a/archive/2022/specs.html b/archive/2022/specs.html
index 1c6ae2fd..68c14b0b 100644
--- a/archive/2022/specs.html
+++ b/archive/2022/specs.html
@@ -34,7 +34,7 @@

    Machine Specifications

diff --git a/archive/2022/stats.html b/archive/2022/stats.html
index 0d55d35a..6681791f 100644
--- a/archive/2022/stats.html
+++ b/archive/2022/stats.html
@@ -48,7 +48,7 @@

    SMT-COMP 2022

diff --git a/archive/2022/tools.html b/archive/2022/tools.html
index 4e73d07d..4db0c971 100644
--- a/archive/2022/tools.html
+++ b/archive/2022/tools.html
@@ -34,7 +34,7 @@

    Tools

    Pre-Processor (Benchmark Scrambler)
    GitHub Repository
    SMT-COMP 2022 Releases
    • Single Query Track: available on StarExec as SMT-COMP 2021 Single-Query Scrambler (id: 708)
    • Incremental Track: available on StarExec as SMT-COMP 2021 Incremental Scrambler (id: 709)
    • Unsat Core Track: available on StarExec as SMT-COMP 2021 Unsat Core Scrambler (id: 711)
    • Model Validation Track: available on StarExec as SMT-COMP 2021 Model Validation Scrambler (id: 710)
    • Proof Exhibition Track: available on StarExec as SMT-COMP 2022 Proof Exhibition Scrambler (id: 729)

    Post-Processor
    GitHub Repository
    SMT-COMP 2022 Releases
    • Single Query Track: available on StarExec as SMT-COMP 2021 Single Query (id: 692)
    • Incremental Track: available on StarExec as SMT-COMP 2021 Incremental (id: 691)
    • Unsat Core Track: available on StarExec as SMT-COMP 2021 Unsat Core (id: 727)
    • Model Validation Track: available on StarExec as SMT-COMP 2022 Model-Validation (id: 742)
    • Proof Exhibition Track: available on StarExec as SMT-COMP 2022 Proof-Exhibition (id: 741)

    Trace executor
    GitHub Repository
    Sources
    Binary

    Competition scripts
    Gi

diff --git a/archive/2023/benchmarks.html b/archive/2023/benchmarks.html
index 079d3d41..cd27f00b 100644
--- a/archive/2023/benchmarks.html
+++ b/archive/2023/benchmarks.html
@@ -34,7 +34,7 @@

    Benchmarks

diff --git a/archive/2023/comparisons.html b/archive/2023/comparisons.html
index 07f14de4..37eac73d 100644
--- a/archive/2023/comparisons.html
+++ b/archive/2023/comparisons.html
@@ -48,7 +48,7 @@

    SMT-COMP 2023

diff --git a/archive/2023/divisions/arith.html b/archive/2023/divisions/arith.html
index 2f8ad199..551f2323 100644
--- a/archive/2023/divisions/arith.html
+++ b/archive/2023/divisions/arith.html
@@ -35,7 +35,7 @@

    Tracks

    (The same header and navigation cleanup is applied to all 24 division pages under archive/2023/divisions/: arith, bitvec, equality-lineararith, equality-machinearith, equality-nonlineararith, equality, fparith, qf-adt-bitvec, qf-adt-linarith, qf-array-bitvec-linarith, qf-bitvec, qf-datatypes-bitvec-linarith, qf-datatypes, qf-equality-bitvec-arith, qf-equality-bitvec, qf-equality-lineararith, qf-equality-nonlineararith, qf-equality, qf-fparith, qf-linearintarith, qf-linearrealarith, qf-nonlinearintarith, qf-nonlinearrealarith, qf-strings. Page text is unchanged.)

diff --git a/archive/2023/index.html b/archive/2023/index.html
index 82e1616d..8d4bcc7b 100644
--- a/archive/2023/index.html
+++ b/archive/2023/index.html
@@ -34,7 +34,7 @@

    Acknowledgment

diff --git a/archive/2023/model.html b/archive/2023/model.html
index e17a4fa7..8d6ea95e 100644
--- a/archive/2023/model.html
+++ b/archive/2023/model.html
@@ -34,7 +34,7 @@

    Array values

diff --git a/archive/2023/news/2023-02-07-call-for-benchmark.html b/archive/2023/news/2023-02-07-call-for-benchmark.html
index 4af09728..5cfe0351 100644
--- a/archive/2023/news/2023-02-07-call-for-benchmark.html
+++ b/archive/2023/news/2023-02-07-call-for-benchmark.html
@@ -35,7 +35,7 @@

    Call for benchmarks

    important. Of course, new challenging benchmarks are always appreciated.

    For your submission please follow these guidelines.
    (the -/+ pair in this hunk changes only the markup of this line)

    Note that this is a different (but improved) process from previous

diff --git a/archive/2023/news/2023-02-07-call-for-solvers.html b/archive/2023/news/2023-02-07-call-for-solvers.html
index 00daeb01..ac71368b 100644
--- a/archive/2023/news/2023-02-07-call-for-solvers.html
+++ b/archive/2023/news/2023-02-07-call-for-solvers.html
@@ -35,7 +35,7 @@

    (The same header and navigation cleanup is applied to the remaining 2023 pages; page text is unchanged:
    news/2023-02-07-call-for-solvers.html "Call for solvers",
    news/2023-04-28-final-call-for-solvers.html "Final Call for Solvers",
    news/2023-05-20-preliminary-solvers.html "StarExec Test Runs",
    news/2023-06-05-jobs-running.html "SMT-COMP 2023 is Live",
    news/2023-07-06-competition-results.html "Competition results",
    parallel-and-cloud-tracks.html "Solver Submission to Par",
    participants.html "QF_Strings".)

diff --git a/archive/2023/participants/2018-mathsat-incremental.html b/archive/2023/participants/2018-mathsat-incremental.html
index 06e6cee2..118835ee 100644
--- a/archive/2023/participants/2018-mathsat-incremental.html
+++ b/archive/2023/participants/2018-mathsat-incremental.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides
    Report

    - + @@ -144,8 +144,6 @@

    2018-MathSAT-incremental

    - + - - diff --git a/archive/2023/participants/2019-par4.html b/archive/2023/participants/2019-par4.html index 19f705e5..6062d24d 100644 --- a/archive/2023/participants/2019-par4.html +++ b/archive/2023/participants/2019-par4.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2019-Par4

    - + - - diff --git a/archive/2023/participants/2019-z3.html b/archive/2023/participants/2019-z3.html index 7ac1d6a6..1cc21fc8 100644 --- a/archive/2023/participants/2019-z3.html +++ b/archive/2023/participants/2019-z3.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2019-Z3

    - + - - diff --git a/archive/2023/participants/2020-bitwuzla.html b/archive/2023/participants/2020-bitwuzla.html index 539a7f97..b0191739 100644 --- a/archive/2023/participants/2020-bitwuzla.html +++ b/archive/2023/participants/2020-bitwuzla.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2020-Bitwuzla

    - + - - diff --git a/archive/2023/participants/2020-cvc4-uc.html b/archive/2023/participants/2020-cvc4-uc.html index 25fe3b51..06ae640e 100644 --- a/archive/2023/participants/2020-cvc4-uc.html +++ b/archive/2023/participants/2020-cvc4-uc.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2020-CVC4-uc

    - + - - diff --git a/archive/2023/participants/2020-cvc4.html b/archive/2023/participants/2020-cvc4.html index e81ef691..e94d444b 100644 --- a/archive/2023/participants/2020-cvc4.html +++ b/archive/2023/participants/2020-cvc4.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2020-CVC4

    - + - - diff --git a/archive/2023/participants/2020-yices2.html b/archive/2023/participants/2020-yices2.html index 3506a28c..1bb3c568 100644 --- a/archive/2023/participants/2020-yices2.html +++ b/archive/2023/participants/2020-yices2.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2020-Yices2

    - + - - diff --git a/archive/2023/participants/2020-z3.html b/archive/2023/participants/2020-z3.html index 16636a19..17495759 100644 --- a/archive/2023/participants/2020-z3.html +++ b/archive/2023/participants/2020-z3.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2020-z3

    - + - - diff --git a/archive/2023/participants/2021-cvc5-inc.html b/archive/2023/participants/2021-cvc5-inc.html index 8afb85ec..f4c41380 100644 --- a/archive/2023/participants/2021-cvc5-inc.html +++ b/archive/2023/participants/2021-cvc5-inc.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-cvc5-inc

    - + - - diff --git a/archive/2023/participants/2021-cvc5-uc.html b/archive/2023/participants/2021-cvc5-uc.html index 95d7dc7e..002d92a9 100644 --- a/archive/2023/participants/2021-cvc5-uc.html +++ b/archive/2023/participants/2021-cvc5-uc.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-cvc5-uc

    - + - - diff --git a/archive/2023/participants/2021-mathsat5.html b/archive/2023/participants/2021-mathsat5.html index 6555a21f..cdff4ed8 100644 --- a/archive/2023/participants/2021-mathsat5.html +++ b/archive/2023/participants/2021-mathsat5.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-MathSAT5

    - + - - diff --git a/archive/2023/participants/2021-yices2-incremental.html b/archive/2023/participants/2021-yices2-incremental.html index be8ea0e7..2a43a4a3 100644 --- a/archive/2023/participants/2021-yices2-incremental.html +++ b/archive/2023/participants/2021-yices2-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-Yices2 incremental

    - + - - diff --git a/archive/2023/participants/2021-yices2-model-validation.html b/archive/2023/participants/2021-yices2-model-validation.html index 8f625826..3c3a526b 100644 --- a/archive/2023/participants/2021-yices2-model-validation.html +++ b/archive/2023/participants/2021-yices2-model-validation.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-Yices2 model-validation

    - + - - diff --git a/archive/2023/participants/2021-yices2.html b/archive/2023/participants/2021-yices2.html index 9db59cc0..733e46c7 100644 --- a/archive/2023/participants/2021-yices2.html +++ b/archive/2023/participants/2021-yices2.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-Yices2

    - + - - diff --git a/archive/2023/participants/2021-z3.html b/archive/2023/participants/2021-z3.html index 61076760..5c0ed87f 100644 --- a/archive/2023/participants/2021-z3.html +++ b/archive/2023/participants/2021-z3.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Parallel & Cloud Tracks
    Participants
    Results
    Slides

    - + @@ -144,8 +144,6 @@

    2021-z3

    - + - - diff --git a/archive/2023/participants/2022-bitwuzla.html b/archive/2023/participants/2022-bitwuzla.html index af003c76..883e782b 100644 --- a/archive/2023/participants/2022-bitwuzla.html +++ b/archive/2023/participants/2022-bitwuzla.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    2022-Bitwuzla

    - + - - diff --git a/archive/2023/participants/2022-cvc5.html b/archive/2023/participants/2022-cvc5.html index 0459f925..08720414 100644 --- a/archive/2023/participants/2022-cvc5.html +++ b/archive/2023/participants/2022-cvc5.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    2022-cvc5

    - + - - diff --git a/archive/2023/participants/2022-mathsat.html b/archive/2023/participants/2022-mathsat.html index 2f26ab08..48d8e9f7 100644 --- a/archive/2023/participants/2022-mathsat.html +++ b/archive/2023/participants/2022-mathsat.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    2022-MathSAT

    - + - - diff --git a/archive/2023/participants/2022-opensmt.html b/archive/2023/participants/2022-opensmt.html index 18f333fb..ef11b9b1 100644 --- a/archive/2023/participants/2022-opensmt.html +++ b/archive/2023/participants/2022-opensmt.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    2022-OpenSMT

    - + - - diff --git a/archive/2023/participants/2022-smtinterpol.html b/archive/2023/participants/2022-smtinterpol.html index 980888f4..fb05ce83 100644 --- a/archive/2023/participants/2022-smtinterpol.html +++ b/archive/2023/participants/2022-smtinterpol.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    2022-smtinterpol

    - + - - diff --git a/archive/2023/participants/2022-stp-fixed.html b/archive/2023/participants/2022-stp-fixed.html index 8d6c5c10..231a3983 100644 --- a/archive/2023/participants/2022-stp-fixed.html +++ b/archive/2023/participants/2022-stp-fixed.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    2022-STP-fixed

    - + - - diff --git a/archive/2023/participants/2022-ultimateeliminator-mathsat.html b/archive/2023/participants/2022-ultimateeliminator-mathsat.html index c813f27b..1f5ba277 100644 --- a/archive/2023/participants/2022-ultimateeliminator-mathsat.html +++ b/archive/2023/participants/2022-ultimateeliminator-mathsat.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    2022-UltimateEliminator+MathSAT

    - + - - diff --git a/archive/2023/participants/2022-vampire.html b/archive/2023/participants/2022-vampire.html index 5afda71b..ea16ac12 100644 --- a/archive/2023/participants/2022-vampire.html +++ b/archive/2023/participants/2022-vampire.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    2022-Vampire

    - + - - diff --git a/archive/2023/participants/2022-yices2.html b/archive/2023/participants/2022-yices2.html index 38a9acc2..60d6d84c 100644 --- a/archive/2023/participants/2022-yices2.html +++ b/archive/2023/participants/2022-yices2.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    2022-Yices2

    - + - - diff --git a/archive/2023/participants/2022-z3-4-8-17.html b/archive/2023/participants/2022-z3-4-8-17.html index 3b0c0cfa..4ac12492 100644 --- a/archive/2023/participants/2022-z3-4-8-17.html +++ b/archive/2023/participants/2022-z3-4-8-17.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    2022-z3-4.8.17

    - + - - diff --git a/archive/2023/participants/2022-z3-fixed.html b/archive/2023/participants/2022-z3-fixed.html index 67ae3227..48486c8f 100644 --- a/archive/2023/participants/2022-z3-fixed.html +++ b/archive/2023/participants/2022-z3-fixed.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    2022-Z3++-fixed

    - + - - diff --git a/archive/2023/participants/2022-z3.html b/archive/2023/participants/2022-z3.html index d4cb6e52..e91861ab 100644 --- a/archive/2023/participants/2022-z3.html +++ b/archive/2023/participants/2022-z3.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides
    Rules
    Benchmarks
    Tools
    Specs
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    2022-Z3++

    - + - - diff --git a/archive/2023/participants/bitwuzla-fixed.html b/archive/2023/participants/bitwuzla-fixed.html index 237ac265..9f962c9a 100644 --- a/archive/2023/participants/bitwuzla-fixed.html +++ b/archive/2023/participants/bitwuzla-fixed.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    Bitwuzla Fixed

    - + - - diff --git a/archive/2023/participants/bitwuzla.html b/archive/2023/participants/bitwuzla.html index db9c8488..404072b0 100644 --- a/archive/2023/participants/bitwuzla.html +++ b/archive/2023/participants/bitwuzla.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    Bitwuzla

    - + - - diff --git a/archive/2023/participants/colibri.html b/archive/2023/participants/colibri.html index fdd6a2ea..c550bf71 100644 --- a/archive/2023/participants/colibri.html +++ b/archive/2023/participants/colibri.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    COLIBRI

    - + - - diff --git a/archive/2023/participants/cvc5-lfsc.html b/archive/2023/participants/cvc5-lfsc.html index ad98ff5f..ef64eefc 100644 --- a/archive/2023/participants/cvc5-lfsc.html +++ b/archive/2023/participants/cvc5-lfsc.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    cvc5-lfsc

    - + - - diff --git a/archive/2023/participants/cvc5-nra-ls.html b/archive/2023/participants/cvc5-nra-ls.html index 0c0ed15d..8167333c 100644 --- a/archive/2023/participants/cvc5-nra-ls.html +++ b/archive/2023/participants/cvc5-nra-ls.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    cvc5-NRA-LS

    - + - - diff --git a/archive/2023/participants/cvc5.html b/archive/2023/participants/cvc5.html index 5197f55d..76b016a0 100644 --- a/archive/2023/participants/cvc5.html +++ b/archive/2023/participants/cvc5.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -144,8 +144,6 @@

    cvc5

    [Each remaining 2023 participant page carries the same three hunks: the
    header link (@@ -35,7 +35,7 @@), the year entry in the navigation menu
    (@@ -56,7 +56,7 @@), and the footer (@@ -144,8 +144,6 @@, two lines
    removed). The changed markup itself did not survive flattening to text;
    the modified files, their blob hashes, and page titles are:]

    archive/2023/participants/iprover-fixed.html                            77a4926e..a5dfa248   iProver Fixed
    archive/2023/participants/iprover.html                                  fbd88424..c916b9d4   iProver
    archive/2023/participants/ismt.html                                     c1303a83..412acbfe   ismt
    archive/2023/participants/opensmt.html                                  ff8e179f..59428157   OpenSMT
    archive/2023/participants/ostrich-fixed.html                            4949065b..8d233e3f   OSTRICH Fixed
    archive/2023/participants/ostrich.html                                  73265b09..6c6bf0f7   OSTRICH
    archive/2023/participants/q3b-pbdd.html                                 0a092b46..2f697d5b   Q3B-pBDD
    archive/2023/participants/q3b.html                                      299f3080..1e3d9d8f   Q3B
    archive/2023/participants/smt-rat-mcsat.html                            970907da..6570e82d   SMT-RAT-MCSAT
    archive/2023/participants/smtinterpol.html                              6c7056a3..d56aae52   SMTInterpol
    archive/2023/participants/stp.html                                      ca7132bf..990e6c66   STP
    archive/2023/participants/ultimateeliminator-mathsat.html               7f3f720e..4bb53f11   UltimateEliminator+MathSAT
    archive/2023/participants/ultimateintblastingwrapper-smtinterpol.html   f277c6fb..a211d4bc   UltimateIntBlastingWrapper+SMTInterpol
    archive/2023/participants/vampire.html                                  5518717b..17ac11c7   Vampire
    archive/2023/participants/yaga.html                                     5e7ae419..80def21d   Yaga
    archive/2023/participants/yices-ismt.html                               f34f25ba..182884b8   yices-ismt
    archive/2023/participants/yices2-fixed.html                             0285b2ce..6f444b8d   Yices2 Fixed
    archive/2023/participants/yices2.html                                   eadcffee..34def6bb   Yices2
    archive/2023/participants/yicesqs.html                                  7f647169..e4b8e77c   YicesQS
    archive/2023/participants/z3-alpha.html                                 655befff..a6c3b80e   z3-alpha
    archive/2023/participants/z3-noodler-fixed.html                         b17675d3..37c39ed9   Z3-Noodler Fixed
    archive/2023/participants/z3-noodler.html                               6fea20fc..0e3fd788   Z3-Noodler
    archive/2023/participants/z3-owl-fixed.html                             96100062..b0f549e1   Z3-Owl Fixed
    archive/2023/participants/z3-owl.html                                   7604f674..f4a594db   Z3-Owl
    archive/2023/participants/z3.html                                       d7b2ee4f..84541fa6   Z3++

    archive/2023/proof-track.html   319632ab..c58cc722
        [chrome hunks at @@ -34,7 +34,7 @@ and @@ -55,7 +55,7 @@, plus one
        hunk in the "Key-Value Pairs" section (@@ -179,7 +179,6 @@, one line
        removed)]

    archive/2023/results.html   9dd29003..603e8cba
        [chrome hunks at @@ -34,7 +34,7 @@ and @@ -55,7 +55,7 @@; one large
        hunk rewriting the "Tracks Summary" and "Divisions" tables
        (@@ -150,283 +150,283 @@) and one at @@ -441,7 +441,6 @@, one line
        removed]
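    [Aside: the chrome edits above are mechanical and uniform across the
    generated pages. A minimal sketch of how such a sweep could be applied,
    assuming the change is a plain string substitution; the OLD and NEW
    strings below are placeholders, since the real hunk bodies are not
    recoverable from this flattened patch:]

        # hypothetical_chrome_sweep.py: illustration only, not the method
        # used by this commit. OLD/NEW stand in for the unrecoverable
        # hunk bodies.
        from pathlib import Path

        OLD = '<a href="https://smt-comp.github.io/2023">'  # assumed old link
        NEW = '<a href="../2023/">'                         # assumed new link

        # walk every generated 2023 archive page and substitute in place
        for page in Path("archive/2023").rglob("*.html"):
            text = page.read_text(encoding="utf-8")
            if OLD in text:
                page.write_text(text.replace(OLD, NEW), encoding="utf-8")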

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ABV (Proof Exhibition Track)

    Competition results for the ABV - + logic - + in the Proof Exhibition Track.

    @@ -119,7 +119,7 @@

    ABV (Proof Exhibition Track)

    - + cvc5-lfsc 0 44 @@ -130,7 +130,7 @@

    ABV (Proof Exhibition Track)

    - + cvc5 0 41 @@ -152,7 +152,7 @@

    ABV (Proof Exhibition Track)

    - + cvc5-lfsc 0 449.8029.9044410 @@ -161,7 +161,7 @@

    ABV (Proof Exhibition Track)

    - + cvc5 0 4115.10515.0954442 @@ -185,7 +185,6 @@

    ABV (Proof Exhibition Track)

    - + - diff --git a/archive/2023/results/abv-single-query.html b/archive/2023/results/abv-single-query.html index 3f6fa567..62bbb030 100644 --- a/archive/2023/results/abv-single-query.html +++ b/archive/2023/results/abv-single-query.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ABV (Single Query Track)

    Competition results for the ABV - + logic - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    ABV (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) cvc5cvc5cvc5 - - + + cvc5 - - + + cvc5 - + @@ -131,7 +131,7 @@

    ABV (Single Query Track)

    - + cvc5 0 896 @@ -142,7 +142,7 @@

    ABV (Single Query Track)

    - + 2022-z3-4.8.17n 0 350 @@ -153,7 +153,7 @@

    ABV (Single Query Track)

    - + Bitwuzla Fixedn 0 51 @@ -164,7 +164,7 @@

    ABV (Single Query Track)

    - + Bitwuzla 0 51 @@ -175,7 +175,7 @@

    ABV (Single Query Track)

    - + UltimateEliminator+MathSAT 0 9 @@ -186,7 +186,7 @@

    ABV (Single Query Track)

    - + UltimateIntBlastingWrapper+SMTInterpol 7 106 @@ -208,7 +208,7 @@

    ABV (Single Query Track)

    - + cvc5 0 89630896.19832020.048964134831591750 @@ -217,7 +217,7 @@

    ABV (Single Query Track)

    - + 2022-z3-4.8.17n 0 3501420.9571420.571350338122137136 @@ -226,7 +226,7 @@

    ABV (Single Query Track)

    - + Bitwuzla Fixedn 0 511.3341.355513120243647 @@ -235,7 +235,7 @@

    ABV (Single Query Track)

    - + Bitwuzla 0 511.3531.374513120243647 @@ -244,7 +244,7 @@

    ABV (Single Query Track)

    - + UltimateEliminator+MathSAT 0 960.83532.32491824781 @@ -253,7 +253,7 @@

    ABV (Single Query Track)

    - + UltimateIntBlastingWrapper+SMTInterpol 7 1062267.6061364.75410621852381439 @@ -273,7 +273,7 @@

    ABV (Single Query Track)

    - + cvc5 0 41329321.47430443.73741341302421832750 @@ -282,7 +282,7 @@

    ABV (Single Query Track)

    - + 2022-z3-4.8.17n 0 338935.126934.65133833803171832136 @@ -291,7 +291,7 @@

    ABV (Single Query Track)

    - + Bitwuzla 0 310.4140.42931310624183247 @@ -300,7 +300,7 @@

    ABV (Single Query Track)

    - + Bitwuzla Fixedn 0 310.4160.43131310624183247 @@ -309,7 +309,7 @@

    ABV (Single Query Track)

    - + UltimateEliminator+MathSAT 0 112.147.10311065418321 @@ -318,7 +318,7 @@

    ABV (Single Query Track)

    - + UltimateIntBlastingWrapper+SMTInterpol 7 211078.287888.47212106341832439 @@ -338,7 +338,7 @@

    ABV (Single Query Track)

    - + cvc5 0 4831574.7251576.302483048312003750 @@ -347,7 +347,7 @@

    ABV (Single Query Track)

    - + UltimateIntBlastingWrapper+SMTInterpol 0 851189.319476.284850853992003439 @@ -356,7 +356,7 @@

    ABV (Single Query Track)

    - + Bitwuzla Fixedn 0 200.9180.92420020464200347 @@ -365,7 +365,7 @@

    ABV (Single Query Track)

    - + Bitwuzla 0 200.940.94420020464200347 @@ -374,7 +374,7 @@

    ABV (Single Query Track)

    - + 2022-z3-4.8.17n 0 12485.831485.92120124722003136 @@ -383,7 +383,7 @@

    ABV (Single Query Track)

    - + UltimateEliminator+MathSAT 0 848.69525.22180847620031 @@ -403,7 +403,7 @@

    ABV (Single Query Track)

    - + cvc5 0 754903.307886.72375428447017331678 @@ -412,7 +412,7 @@

    ABV (Single Query Track)

    - + 2022-z3-4.8.17n 0 34667.15466.548346335112141157 @@ -421,7 +421,7 @@

    ABV (Single Query Track)

    - + Bitwuzla Fixedn 0 511.3341.355513120243647 @@ -430,7 +430,7 @@

    ABV (Single Query Track)

    - + Bitwuzla 0 511.3531.374513120243647 @@ -439,7 +439,7 @@

    ABV (Single Query Track)

    - + UltimateEliminator+MathSAT 0 960.83532.32491824783 @@ -448,7 +448,7 @@

    ABV (Single Query Track)

    - + UltimateIntBlastingWrapper+SMTInterpol 7 1041422.978540.8310420842383607 @@ -472,7 +472,6 @@

    ABV (Single Query Track)

    - + - diff --git a/archive/2023/results/abv-unsat-core.html b/archive/2023/results/abv-unsat-core.html index c8936406..21aa15bd 100644 --- a/archive/2023/results/abv-unsat-core.html +++ b/archive/2023/results/abv-unsat-core.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ABV (Unsat Core Track)

    Competition results for the ABV - + logic - + in the Unsat Core Track.

    @@ -104,8 +104,8 @@

    ABV (Unsat Core Track)

    Sequential PerformanceParallel Performance cvc5cvc5 - - + + @@ -126,7 +126,7 @@

    ABV (Unsat Core Track)

    - + cvc5 0 151 @@ -137,7 +137,7 @@

    ABV (Unsat Core Track)

    - + 2022-z3-4.8.17n 0 121 @@ -148,7 +148,7 @@

    ABV (Unsat Core Track)

    - + Bitwuzla Fixedn 0 111 @@ -159,7 +159,7 @@

    ABV (Unsat Core Track)

    - + Bitwuzla 0 111 @@ -170,7 +170,7 @@

    ABV (Unsat Core Track)

    - + UltimateEliminator+MathSAT 0 0 @@ -192,7 +192,7 @@

    ABV (Unsat Core Track)

    - + cvc5 0 1511.1891.1730 @@ -201,7 +201,7 @@

    ABV (Unsat Core Track)

    - + 2022-z3-4.8.17n 0 121729.474729.59913 @@ -210,7 +210,7 @@

    ABV (Unsat Core Track)

    - + Bitwuzla Fixedn 0 111134.692134.7083 @@ -219,7 +219,7 @@

    ABV (Unsat Core Track)

    - + Bitwuzla 0 111137.571137.6823 @@ -228,7 +228,7 @@

    ABV (Unsat Core Track)

    - + UltimateEliminator+MathSAT 0 00.00.00 @@ -252,7 +252,6 @@

    ABV (Unsat Core Track)

    - + - diff --git a/archive/2023/results/abvfp-proof-exhibition.html b/archive/2023/results/abvfp-proof-exhibition.html index 0c3142d9..963abdc3 100644 --- a/archive/2023/results/abvfp-proof-exhibition.html +++ b/archive/2023/results/abvfp-proof-exhibition.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ABVFP (Proof Exhibition Track)

    Competition results for the ABVFP - + logic - + in the Proof Exhibition Track.

    @@ -119,7 +119,7 @@

    ABVFP (Proof Exhibition Track)

    - + cvc5-lfsc 0 3 @@ -130,7 +130,7 @@

    ABVFP (Proof Exhibition Track)

    - + cvc5 0 1 @@ -152,7 +152,7 @@

    ABVFP (Proof Exhibition Track)

    - + cvc5-lfsc 0 322.16122.16100 @@ -161,7 +161,7 @@

    ABVFP (Proof Exhibition Track)

    - + cvc5 0 14.5374.5122 @@ -185,7 +185,6 @@

    ABVFP (Proof Exhibition Track)

    - + - diff --git a/archive/2023/results/abvfp-single-query.html b/archive/2023/results/abvfp-single-query.html index 034cc0e2..f8ae40d7 100644 --- a/archive/2023/results/abvfp-single-query.html +++ b/archive/2023/results/abvfp-single-query.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ABVFP (Single Query Track)

    Competition results for the ABVFP - + logic - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    ABVFP (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) cvc5cvc5Bitwuzla - - + + cvc5 - - + + Bitwuzla - + @@ -131,7 +131,7 @@

    ABVFP (Single Query Track)

    - + 2022-z3-4.8.17n 0 43 @@ -142,7 +142,7 @@

    ABVFP (Single Query Track)

    - + cvc5 0 26 @@ -153,7 +153,7 @@

    ABVFP (Single Query Track)

    - + Bitwuzla Fixedn 0 24 @@ -164,7 +164,7 @@

    ABVFP (Single Query Track)

    - + Bitwuzla 0 24 @@ -175,7 +175,7 @@

    ABVFP (Single Query Track)

    - + UltimateEliminator+MathSAT 0 0 @@ -197,7 +197,7 @@

    ABVFP (Single Query Track)

    - + 2022-z3-4.8.17n 0 439.3929.3143403178 @@ -206,7 +206,7 @@

    ABVFP (Single Query Track)

    - + cvc5 0 262964.1422998.094262333434 @@ -215,7 +215,7 @@

    ABVFP (Single Query Track)

    - + Bitwuzla Fixedn 0 2432.28832.29824240360 @@ -224,7 +224,7 @@

    ABVFP (Single Query Track)

    - + Bitwuzla 0 2432.66332.67524240360 @@ -233,7 +233,7 @@

    ABVFP (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.0000600 @@ -253,7 +253,7 @@

    ABVFP (Single Query Track)

    - + 2022-z3-4.8.17n 0 409.2679.191404005158 @@ -262,7 +262,7 @@

    ABVFP (Single Query Track)

    - + Bitwuzla Fixedn 0 2432.28832.2982424021150 @@ -271,7 +271,7 @@

    ABVFP (Single Query Track)

    - + Bitwuzla 0 2432.66332.6752424021150 @@ -280,7 +280,7 @@

    ABVFP (Single Query Track)

    - + cvc5 0 232954.9422988.8923230221534 @@ -289,7 +289,7 @@

    ABVFP (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.000045150 @@ -309,7 +309,7 @@

    ABVFP (Single Query Track)

    - + 2022-z3-4.8.17n 0 30.1250.123030578 @@ -318,7 +318,7 @@

    ABVFP (Single Query Track)

    - + cvc5 0 39.29.20430305734 @@ -327,7 +327,7 @@

    ABVFP (Single Query Track)

    - + Bitwuzla 0 00.00.00003570 @@ -336,7 +336,7 @@

    ABVFP (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.00003570 @@ -345,7 +345,7 @@

    ABVFP (Single Query Track)

    - + Bitwuzla Fixedn 0 00.00.00003570 @@ -365,7 +365,7 @@

    ABVFP (Single Query Track)

    - + 2022-z3-4.8.17n 0 439.3929.3143403178 @@ -374,7 +374,7 @@

    ABVFP (Single Query Track)

    - + Bitwuzla 0 231.3281.32923230371 @@ -383,7 +383,7 @@

    ABVFP (Single Query Track)

    - + Bitwuzla Fixedn 0 231.3331.33523230371 @@ -392,7 +392,7 @@

    ABVFP (Single Query Track)

    - + cvc5 0 1913.18713.188191634141 @@ -401,7 +401,7 @@

    ABVFP (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.0000600 @@ -425,7 +425,6 @@

    ABVFP (Single Query Track)

    - + - diff --git a/archive/2023/results/abvfp-unsat-core.html b/archive/2023/results/abvfp-unsat-core.html index 731570c4..01dc6f7a 100644 --- a/archive/2023/results/abvfp-unsat-core.html +++ b/archive/2023/results/abvfp-unsat-core.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ABVFP (Unsat Core Track)

    Competition results for the ABVFP - + logic - + in the Unsat Core Track.

    @@ -104,8 +104,8 @@

    ABVFP (Unsat Core Track)

    Sequential PerformanceParallel Performance —— - - + + @@ -126,7 +126,7 @@

    ABVFP (Unsat Core Track)

    - + UltimateEliminator+MathSAT 0 0 @@ -137,7 +137,7 @@

    ABVFP (Unsat Core Track)

    - + Bitwuzla Fixedn 0 0 @@ -148,7 +148,7 @@

    ABVFP (Unsat Core Track)

    - + 2022-z3-4.8.17n 0 0 @@ -159,7 +159,7 @@

    ABVFP (Unsat Core Track)

    - + Bitwuzla 0 0 @@ -170,7 +170,7 @@

    ABVFP (Unsat Core Track)

    - + cvc5 0 0 @@ -192,7 +192,7 @@

    ABVFP (Unsat Core Track)

    - + UltimateEliminator+MathSAT 0 00.00.00 @@ -201,7 +201,7 @@

    ABVFP (Unsat Core Track)

    - + Bitwuzla Fixedn 0 00.0730.0730 @@ -210,7 +210,7 @@

    ABVFP (Unsat Core Track)

    - + 2022-z3-4.8.17n 0 00.0920.0870 @@ -219,7 +219,7 @@

    ABVFP (Unsat Core Track)

    - + Bitwuzla 0 00.1230.1240 @@ -228,7 +228,7 @@

    ABVFP (Unsat Core Track)

    - + cvc5 0 00.8540.8531 @@ -252,7 +252,6 @@

    ABVFP (Unsat Core Track)

    - + - diff --git a/archive/2023/results/abvfplra-incremental.html b/archive/2023/results/abvfplra-incremental.html index eae09e99..bb943ce2 100644 --- a/archive/2023/results/abvfplra-incremental.html +++ b/archive/2023/results/abvfplra-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ABVFPLRA (Incremental Track)

    Competition results for the ABVFPLRA - + logic - + in the Incremental Track.

    @@ -104,8 +104,8 @@

    ABVFPLRA (Incremental Track)

    Parallel Performance Bitwuzla - - + + @@ -124,7 +124,7 @@

    ABVFPLRA (Incremental Track)

    - + Bitwuzla 0 2269228.67229.0300 @@ -133,7 +133,7 @@

    ABVFPLRA (Incremental Track)

    - + cvc5 0 8189.389.9114511 @@ -142,7 +142,7 @@

    ABVFPLRA (Incremental Track)

    - + 2022-UltimateEliminator+MathSATn 0 818105.5580.5314510 @@ -151,7 +151,7 @@

    ABVFPLRA (Incremental Track)

    - + UltimateEliminator+MathSAT 0 27932.4515.5519900 @@ -175,7 +175,6 @@

    ABVFPLRA (Incremental Track)

    - + - diff --git a/archive/2023/results/abvfplra-proof-exhibition.html b/archive/2023/results/abvfplra-proof-exhibition.html index d1a1dbcb..2b1d70e2 100644 --- a/archive/2023/results/abvfplra-proof-exhibition.html +++ b/archive/2023/results/abvfplra-proof-exhibition.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ABVFPLRA (Proof Exhibition Track)

    Competition results for the ABVFPLRA - + logic - + in the Proof Exhibition Track.

    @@ -119,7 +119,7 @@

    ABVFPLRA (Proof Exhibition Track)

    - + cvc5-lfsc 0 1 @@ -130,7 +130,7 @@

    ABVFPLRA (Proof Exhibition Track)

    - + cvc5 0 0 @@ -152,7 +152,7 @@

    ABVFPLRA (Proof Exhibition Track)

    - + cvc5-lfsc 0 10.150.14931 @@ -161,7 +161,7 @@

    ABVFPLRA (Proof Exhibition Track)

    - + cvc5 0 00.00.042 @@ -185,7 +185,6 @@

    ABVFPLRA (Proof Exhibition Track)

    - + - diff --git a/archive/2023/results/abvfplra-single-query.html b/archive/2023/results/abvfplra-single-query.html index 499abfcd..89cce403 100644 --- a/archive/2023/results/abvfplra-single-query.html +++ b/archive/2023/results/abvfplra-single-query.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ABVFPLRA (Single Query Track)

    Competition results for the ABVFPLRA - + logic - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    ABVFPLRA (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) cvc5cvc5cvc5 - - + + cvc5 - - + + Bitwuzla - + @@ -131,7 +131,7 @@

    ABVFPLRA (Single Query Track)

    - + 2022-z3-4.8.17n 0 49 @@ -142,7 +142,7 @@

    ABVFPLRA (Single Query Track)

    - + cvc5 0 38 @@ -153,7 +153,7 @@

    ABVFPLRA (Single Query Track)

    - + Bitwuzla 0 33 @@ -164,7 +164,7 @@

    ABVFPLRA (Single Query Track)

    - + Bitwuzla Fixedn 0 33 @@ -175,7 +175,7 @@

    ABVFPLRA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 0 @@ -197,7 +197,7 @@

    ABVFPLRA (Single Query Track)

    - + 2022-z3-4.8.17n 0 49245.538245.459494722826 @@ -206,7 +206,7 @@

    ABVFPLRA (Single Query Track)

    - + cvc5 0 384279.3494294.91383443938 @@ -215,7 +215,7 @@

    ABVFPLRA (Single Query Track)

    - + Bitwuzla 0 3344.46244.46633321440 @@ -224,7 +224,7 @@

    ABVFPLRA (Single Query Track)

    - + Bitwuzla Fixedn 0 3344.53844.54633321440 @@ -233,7 +233,7 @@

    ABVFPLRA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.0000770 @@ -253,7 +253,7 @@

    ABVFPLRA (Single Query Track)

    - + 2022-z3-4.8.17n 0 472.9282.8384747003026 @@ -262,7 +262,7 @@

    ABVFPLRA (Single Query Track)

    - + cvc5 0 344268.6864284.23634340133038 @@ -271,7 +271,7 @@

    ABVFPLRA (Single Query Track)

    - + Bitwuzla 0 3244.38844.3923232015300 @@ -280,7 +280,7 @@

    ABVFPLRA (Single Query Track)

    - + Bitwuzla Fixedn 0 3244.46444.4733232015300 @@ -289,7 +289,7 @@

    ABVFPLRA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.000047300 @@ -309,7 +309,7 @@

    ABVFPLRA (Single Query Track)

    - + cvc5 0 410.66310.67440407338 @@ -318,7 +318,7 @@

    ABVFPLRA (Single Query Track)

    - + 2022-z3-4.8.17n 0 2242.611242.62120227326 @@ -327,7 +327,7 @@

    ABVFPLRA (Single Query Track)

    - + Bitwuzla Fixedn 0 10.0730.0741013730 @@ -336,7 +336,7 @@

    ABVFPLRA (Single Query Track)

    - + Bitwuzla 0 10.0740.0741013730 @@ -345,7 +345,7 @@

    ABVFPLRA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.00004730 @@ -365,7 +365,7 @@

    ABVFPLRA (Single Query Track)

    - + 2022-z3-4.8.17n 0 472.9282.838474703028 @@ -374,7 +374,7 @@

    ABVFPLRA (Single Query Track)

    - + Bitwuzla 0 3344.46244.46633321440 @@ -383,7 +383,7 @@

    ABVFPLRA (Single Query Track)

    - + Bitwuzla Fixedn 0 3344.53844.54633321440 @@ -392,7 +392,7 @@

    ABVFPLRA (Single Query Track)

    - + cvc5 0 2412.71312.714242045353 @@ -401,7 +401,7 @@

    ABVFPLRA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.0000770 @@ -425,7 +425,6 @@

    ABVFPLRA (Single Query Track)

    - + - diff --git a/archive/2023/results/abvfplra-unsat-core.html b/archive/2023/results/abvfplra-unsat-core.html index 4de48bf7..320c3ab7 100644 --- a/archive/2023/results/abvfplra-unsat-core.html +++ b/archive/2023/results/abvfplra-unsat-core.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ABVFPLRA (Unsat Core Track)

    Competition results for the ABVFPLRA - + logic - + in the Unsat Core Track.

    @@ -104,8 +104,8 @@

    ABVFPLRA (Unsat Core Track)

    Sequential PerformanceParallel Performance —— - - + + @@ -126,7 +126,7 @@

    ABVFPLRA (Unsat Core Track)

    - + UltimateEliminator+MathSAT 0 0 @@ -137,7 +137,7 @@

    ABVFPLRA (Unsat Core Track)

    - + Bitwuzla Fixedn 0 0 @@ -148,7 +148,7 @@

    ABVFPLRA (Unsat Core Track)

    - + Bitwuzla 0 0 @@ -159,7 +159,7 @@

    ABVFPLRA (Unsat Core Track)

    - + cvc5 0 0 @@ -170,7 +170,7 @@

    ABVFPLRA (Unsat Core Track)

    - + 2022-z3-4.8.17n 0 0 @@ -192,7 +192,7 @@

    ABVFPLRA (Unsat Core Track)

    - + UltimateEliminator+MathSAT 0 00.00.00 @@ -201,7 +201,7 @@

    ABVFPLRA (Unsat Core Track)

    - + Bitwuzla Fixedn 0 00.0750.0750 @@ -210,7 +210,7 @@

    ABVFPLRA (Unsat Core Track)

    - + Bitwuzla 0 00.0760.0760 @@ -219,7 +219,7 @@

    ABVFPLRA (Unsat Core Track)

    - + cvc5 0 03.2083.2080 @@ -228,7 +228,7 @@

    ABVFPLRA (Unsat Core Track)

    - + 2022-z3-4.8.17n 0 0709.343709.6110 @@ -252,7 +252,6 @@

    ABVFPLRA (Unsat Core Track)

    - + - diff --git a/archive/2023/results/alia-incremental.html b/archive/2023/results/alia-incremental.html index e1cd7004..733fb6fa 100644 --- a/archive/2023/results/alia-incremental.html +++ b/archive/2023/results/alia-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ALIA (Incremental Track)

    Competition results for the ALIA - + logic - + in the Incremental Track.

    @@ -104,8 +104,8 @@

    ALIA (Incremental Track)

    Parallel Performance cvc5 - - + + @@ -124,7 +124,7 @@

    ALIA (Incremental Track)

    - + 2021-z3n 0 202552176.6163.9600 @@ -133,7 +133,7 @@

    ALIA (Incremental Track)

    - + cvc5 0 202550498.64481.8920 @@ -142,7 +142,7 @@

    ALIA (Incremental Track)

    - + SMTInterpol 0 202525993.87421.61270 @@ -151,7 +151,7 @@

    ALIA (Incremental Track)

    - + UltimateEliminator+MathSAT 0 18916913714.5312474.85133833 @@ -175,7 +175,6 @@

    ALIA (Incremental Track)

    - + - diff --git a/archive/2023/results/alia-proof-exhibition.html b/archive/2023/results/alia-proof-exhibition.html index 14dfa2f6..5df26611 100644 --- a/archive/2023/results/alia-proof-exhibition.html +++ b/archive/2023/results/alia-proof-exhibition.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ALIA (Proof Exhibition Track)

    Competition results for the ALIA - + logic - + in the Proof Exhibition Track.

    @@ -119,7 +119,7 @@

    ALIA (Proof Exhibition Track)

    - + cvc5-lfsc 0 276 @@ -130,7 +130,7 @@

    ALIA (Proof Exhibition Track)

    - + cvc5 0 267 @@ -141,7 +141,7 @@

    ALIA (Proof Exhibition Track)

    - + SMTInterpol 0 251 @@ -163,7 +163,7 @@

    ALIA (Proof Exhibition Track)

    - + cvc5-lfsc 0 2765770.2965769.609166139 @@ -172,7 +172,7 @@

    ALIA (Proof Exhibition Track)

    - + cvc5 0 2676757.1796749.375175140 @@ -181,7 +181,7 @@

    ALIA (Proof Exhibition Track)

    - + SMTInterpol 0 25115055.7712598.652191132 @@ -205,7 +205,6 @@

    ALIA (Proof Exhibition Track)

    - + - diff --git a/archive/2023/results/alia-single-query.html b/archive/2023/results/alia-single-query.html index add97748..a419dab4 100644 --- a/archive/2023/results/alia-single-query.html +++ b/archive/2023/results/alia-single-query.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ALIA (Single Query Track)

    Competition results for the ALIA - + logic - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    ALIA (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) VampireVampireSMTInterpol - - + + Vampire - - + + Vampire - + @@ -131,7 +131,7 @@

    ALIA (Single Query Track)

    - + Vampire 0 401 @@ -142,7 +142,7 @@

    ALIA (Single Query Track)

    - + cvc5 0 294 @@ -153,7 +153,7 @@

    ALIA (Single Query Track)

    - + 2022-cvc5n 0 290 @@ -164,7 +164,7 @@

    ALIA (Single Query Track)

    - + iProver Fixedn 0 263 @@ -175,7 +175,7 @@

    ALIA (Single Query Track)

    - + iProver 0 262 @@ -186,7 +186,7 @@

    ALIA (Single Query Track)

    - + SMTInterpol 0 239 @@ -197,7 +197,7 @@

    ALIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 5 @@ -219,7 +219,7 @@

    ALIA (Single Query Track)

    - + Vampire 0 40631153.9447879.408406040611311131 @@ -228,7 +228,7 @@

    ALIA (Single Query Track)

    - + cvc5 0 29411789.94711847.707294182761243648 @@ -237,7 +237,7 @@

    ALIA (Single Query Track)

    - + 2022-cvc5n 0 29011699.02411710.925290182721247652 @@ -246,7 +246,7 @@

    ALIA (Single Query Track)

    - + iProver Fixedn 0 27137231.9389567.291271027112661258 @@ -255,7 +255,7 @@

    ALIA (Single Query Track)

    - + iProver 0 27035231.659013.574270027012671259 @@ -264,7 +264,7 @@

    ALIA (Single Query Track)

    - + SMTInterpol 0 23914657.26112482.516239192201298464 @@ -273,7 +273,7 @@

    ALIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 564.67130.44755015320 @@ -293,7 +293,7 @@

    ALIA (Single Query Track)

    - + SMTInterpol 0 1931.71716.2291919031515464 @@ -302,7 +302,7 @@

    ALIA (Single Query Track)

    - + cvc5 0 181012.271014.3771818041515648 @@ -311,7 +311,7 @@

    ALIA (Single Query Track)

    - + 2022-cvc5n 0 181029.091032.2371818041515652 @@ -320,7 +320,7 @@

    ALIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 564.67130.4475501715150 @@ -329,7 +329,7 @@

    ALIA (Single Query Track)

    - + Vampire 0 00.00.00002215151131 @@ -338,7 +338,7 @@

    ALIA (Single Query Track)

    - + iProver 0 00.00.00002215151259 @@ -347,7 +347,7 @@

    ALIA (Single Query Track)

    - + iProver Fixedn 0 00.00.00002215151258 @@ -367,7 +367,7 @@

    ALIA (Single Query Track)

    - + Vampire 0 40631153.9447879.4084060406511261131 @@ -376,7 +376,7 @@

    ALIA (Single Query Track)

    - + cvc5 0 27610777.67710833.3327602761351126648 @@ -385,7 +385,7 @@

    ALIA (Single Query Track)

    - + 2022-cvc5n 0 27210669.93410678.68927202721391126652 @@ -394,7 +394,7 @@

    ALIA (Single Query Track)

    - + iProver Fixedn 0 27137231.9389567.291271027114011261258 @@ -403,7 +403,7 @@

    ALIA (Single Query Track)

    - + iProver 0 27035231.659013.574270027014111261259 @@ -412,7 +412,7 @@

    ALIA (Single Query Track)

    - + SMTInterpol 0 22014625.54412466.28722002201911126464 @@ -421,7 +421,7 @@

    ALIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.000041111260 @@ -441,7 +441,7 @@

    ALIA (Single Query Track)

    - + Vampire 0 3641945.904534.246364036411731173 @@ -450,7 +450,7 @@

    ALIA (Single Query Track)

    - + 2022-cvc5n 0 244189.296187.729244132311293949 @@ -459,7 +459,7 @@

    ALIA (Single Query Track)

    - + cvc5 0 242174.606174.537242132291295947 @@ -468,7 +468,7 @@

    ALIA (Single Query Track)

    - + iProver 0 2242900.973814.363224022413131306 @@ -477,7 +477,7 @@

    ALIA (Single Query Track)

    - + iProver Fixedn 0 2243017.297842.835224022413131306 @@ -486,7 +486,7 @@

    ALIA (Single Query Track)

    - + SMTInterpol 0 193575.746322.774193191741344558 @@ -495,7 +495,7 @@

    ALIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 564.67130.447550153224 @@ -519,7 +519,6 @@

    ALIA (Single Query Track)

    - + - diff --git a/archive/2023/results/alia-unsat-core.html b/archive/2023/results/alia-unsat-core.html index 1edacba5..29bcc919 100644 --- a/archive/2023/results/alia-unsat-core.html +++ b/archive/2023/results/alia-unsat-core.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ALIA (Unsat Core Track)

    Competition results for the ALIA - + logic - + in the Unsat Core Track.

    @@ -104,8 +104,8 @@

    ALIA (Unsat Core Track)

    Sequential PerformanceParallel Performance SMTInterpolSMTInterpol - - + + @@ -126,7 +126,7 @@

    ALIA (Unsat Core Track)

    - + 2021-cvc5-ucn 0 1420 @@ -137,7 +137,7 @@

    ALIA (Unsat Core Track)

    - + SMTInterpol 0 1213 @@ -148,7 +148,7 @@

    ALIA (Unsat Core Track)

    - + cvc5 0 754 @@ -159,7 +159,7 @@

    ALIA (Unsat Core Track)

    - + Vampire 0 167 @@ -170,7 +170,7 @@

    ALIA (Unsat Core Track)

    - + UltimateEliminator+MathSAT 0 0 @@ -192,7 +192,7 @@

    ALIA (Unsat Core Track)

    - + 2021-cvc5-ucn 0 14202230.1582230.596136 @@ -201,7 +201,7 @@

    ALIA (Unsat Core Track)

    - + SMTInterpol 0 122114725.92812464.654132 @@ -210,7 +210,7 @@

    ALIA (Unsat Core Track)

    - + cvc5 0 7547310.67311.846134 @@ -219,7 +219,7 @@

    ALIA (Unsat Core Track)

    - + Vampire 0 1671054.934276.4242 @@ -228,7 +228,7 @@

    ALIA (Unsat Core Track)

    - + UltimateEliminator+MathSAT 0 00.00.00 @@ -252,7 +252,6 @@

    ALIA (Unsat Core Track)

    - + - diff --git a/archive/2023/results/ania-incremental.html b/archive/2023/results/ania-incremental.html index 6edeba5d..9221d5a9 100644 --- a/archive/2023/results/ania-incremental.html +++ b/archive/2023/results/ania-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ANIA (Incremental Track)

    Competition results for the ANIA - + logic - + in the Incremental Track.

    @@ -104,8 +104,8 @@

    ANIA (Incremental Track)

    Parallel Performance cvc5 - - + + @@ -124,7 +124,7 @@

    ANIA (Incremental Track)

    - + cvc5 0 2348858.1555.8300 @@ -133,7 +133,7 @@

    ANIA (Incremental Track)

    - + SMTInterpol 0 23486129.9747.4520 @@ -142,7 +142,7 @@

    ANIA (Incremental Track)

    - + UltimateEliminator+MathSAT 0 95643666.083577.05139243 @@ -166,7 +166,6 @@

    ANIA (Incremental Track)

    - + - diff --git a/archive/2023/results/ania-proof-exhibition.html b/archive/2023/results/ania-proof-exhibition.html index e9f55cf1..dabf049c 100644 --- a/archive/2023/results/ania-proof-exhibition.html +++ b/archive/2023/results/ania-proof-exhibition.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ANIA (Proof Exhibition Track)

    Competition results for the ANIA - + logic - + in the Proof Exhibition Track.

    @@ -119,7 +119,7 @@

    ANIA (Proof Exhibition Track)

    - + cvc5 0 0 @@ -130,7 +130,7 @@

    ANIA (Proof Exhibition Track)

    - + cvc5-lfsc 0 0 @@ -152,7 +152,7 @@

    ANIA (Proof Exhibition Track)

    - + cvc5 0 00.00.011 @@ -161,7 +161,7 @@

    ANIA (Proof Exhibition Track)

    - + cvc5-lfsc 0 00.00.011 @@ -185,7 +185,6 @@

    ANIA (Proof Exhibition Track)

    - + - diff --git a/archive/2023/results/ania-single-query.html b/archive/2023/results/ania-single-query.html index 165fc5bf..cab700de 100644 --- a/archive/2023/results/ania-single-query.html +++ b/archive/2023/results/ania-single-query.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ANIA (Single Query Track)

    Competition results for the ANIA - + logic - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    ANIA (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) cvc5cvc5cvc5 - - + + iProver - - + + cvc5 - + @@ -131,7 +131,7 @@

    ANIA (Single Query Track)

    - + cvc5 0 3 @@ -142,7 +142,7 @@

    ANIA (Single Query Track)

    - + iProver 0 1 @@ -153,7 +153,7 @@

    ANIA (Single Query Track)

    - + iProver Fixedn 0 1 @@ -164,7 +164,7 @@

    ANIA (Single Query Track)

    - + Vampire 0 0 @@ -175,7 +175,7 @@

    ANIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 0 @@ -197,7 +197,7 @@

    ANIA (Single Query Track)

    - + cvc5 0 30.0620.06330535 @@ -206,7 +206,7 @@

    ANIA (Single Query Track)

    - + iProver 0 11.910.6511015555 @@ -215,7 +215,7 @@

    ANIA (Single Query Track)

    - + iProver Fixedn 0 11.960.6521015555 @@ -224,7 +224,7 @@

    ANIA (Single Query Track)

    - + Vampire 0 00.00.0000560 @@ -233,7 +233,7 @@

    ANIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.0000560 @@ -253,7 +253,7 @@

    ANIA (Single Query Track)

    - + cvc5 0 30.0620.063300535 @@ -262,7 +262,7 @@

    ANIA (Single Query Track)

    - + Vampire 0 00.00.00003530 @@ -271,7 +271,7 @@

    ANIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.00003530 @@ -280,7 +280,7 @@

    ANIA (Single Query Track)

    - + iProver 0 00.00.000035355 @@ -289,7 +289,7 @@

    ANIA (Single Query Track)

    - + iProver Fixedn 0 00.00.000035355 @@ -309,7 +309,7 @@

    ANIA (Single Query Track)

    - + iProver 0 11.910.65110105555 @@ -318,7 +318,7 @@

    ANIA (Single Query Track)

    - + iProver Fixedn 0 11.960.65210105555 @@ -327,7 +327,7 @@

    ANIA (Single Query Track)

    - + Vampire 0 00.00.00001550 @@ -336,7 +336,7 @@

    ANIA (Single Query Track)

    - + cvc5 0 00.00.00001555 @@ -345,7 +345,7 @@

    ANIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.00001550 @@ -365,7 +365,7 @@

    ANIA (Single Query Track)

    - + cvc5 0 30.0620.06330535 @@ -374,7 +374,7 @@

    ANIA (Single Query Track)

    - + iProver 0 11.910.6511015555 @@ -383,7 +383,7 @@

    ANIA (Single Query Track)

    - + iProver Fixedn 0 11.960.6521015555 @@ -392,7 +392,7 @@

    ANIA (Single Query Track)

    - + Vampire 0 00.00.0000560 @@ -401,7 +401,7 @@

    ANIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.0000561 @@ -425,7 +425,6 @@

    ANIA (Single Query Track)

    - + - diff --git a/archive/2023/results/ania-unsat-core.html b/archive/2023/results/ania-unsat-core.html index d82c2561..94ea9690 100644 --- a/archive/2023/results/ania-unsat-core.html +++ b/archive/2023/results/ania-unsat-core.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    ANIA (Unsat Core Track)

    Competition results for the ANIA - + logic - + in the Unsat Core Track.

    @@ -104,8 +104,8 @@

    ANIA (Unsat Core Track)

    Sequential PerformanceParallel Performance —— - - + + @@ -126,7 +126,7 @@

    ANIA (Unsat Core Track)

    - + Vampire 0 0 @@ -137,7 +137,7 @@

    ANIA (Unsat Core Track)

    - + cvc5 0 0 @@ -148,7 +148,7 @@

    ANIA (Unsat Core Track)

    - + UltimateEliminator+MathSAT 0 0 @@ -170,7 +170,7 @@

    ANIA (Unsat Core Track)

    - + Vampire 0 00.00.00 @@ -179,7 +179,7 @@

    ANIA (Unsat Core Track)

    - + cvc5 0 00.00.01 @@ -188,7 +188,7 @@

    ANIA (Unsat Core Track)

    - + UltimateEliminator+MathSAT 0 00.00.00 @@ -212,7 +212,6 @@

    ANIA (Unsat Core Track)

    - + - diff --git a/archive/2023/results/arith-cloud.html b/archive/2023/results/arith-cloud.html index 831660ac..f5ff4c00 100644 --- a/archive/2023/results/arith-cloud.html +++ b/archive/2023/results/arith-cloud.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    Arith (Cloud Track)

    Competition results for the Arith - + division - + in the Cloud Track.

    @@ -117,7 +117,7 @@

    Arith (Cloud Track)

    - + Vampire 0 293.262202900 @@ -126,7 +126,7 @@

    Arith (Cloud Track)

    - + cvc5 0 00.00001100 @@ -146,7 +146,7 @@

    Arith (Cloud Track)

    - + Vampire 0 00.00000110 @@ -155,7 +155,7 @@

    Arith (Cloud Track)

    - + cvc5 0 00.00000110 @@ -175,7 +175,7 @@

    Arith (Cloud Track)

    - + Vampire 0 293.262202360 @@ -184,7 +184,7 @@

    Arith (Cloud Track)

    - + cvc5 0 00.0000560 @@ -204,7 +204,7 @@

    Arith (Cloud Track)

    - + Vampire 0 00.000011011 @@ -213,7 +213,7 @@

    Arith (Cloud Track)

    - + cvc5 0 00.000011011 @@ -237,7 +237,6 @@

    Arith (Cloud Track)

    - + - diff --git a/archive/2023/results/arith-incremental.html b/archive/2023/results/arith-incremental.html index fa7d1455..14151a4f 100644 --- a/archive/2023/results/arith-incremental.html +++ b/archive/2023/results/arith-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    Arith (Incremental Track)

    Competition results for the Arith - + division - + in the Incremental Track.

    @@ -104,8 +104,8 @@

    Arith (Incremental Track)

    Parallel Performance cvc5 - - + + @@ -124,7 +124,7 @@

    Arith (Incremental Track)

    - + 2021-cvc5-incn 0 4136294.6193.49000 @@ -133,7 +133,7 @@

    Arith (Incremental Track)

    - + cvc5 0 41362171.36170.32000 @@ -142,7 +142,7 @@

    Arith (Incremental Track)

    - + UltimateEliminator+MathSAT 0 41362698.81484.89000 @@ -151,7 +151,7 @@

    Arith (Incremental Track)

    - + SMTInterpol 0 38114209.0684.82324801 @@ -175,7 +175,6 @@

    Arith (Incremental Track)

    - + - diff --git a/archive/2023/results/arith-parallel.html b/archive/2023/results/arith-parallel.html index 42aef02b..27f3d38d 100644 --- a/archive/2023/results/arith-parallel.html +++ b/archive/2023/results/arith-parallel.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    Arith (Parallel Track)

    Competition results for the Arith - + division - + in the Parallel Track.

    @@ -117,7 +117,7 @@

    Arith (Parallel Track)

    - + Vampire 0 22.621202900 @@ -126,7 +126,7 @@

    Arith (Parallel Track)

    - + iProver 0 00.00001100 @@ -146,7 +146,7 @@

    Arith (Parallel Track)

    - + Vampire 0 00.00000110 @@ -155,7 +155,7 @@

    Arith (Parallel Track)

    - + iProver 0 00.00000110 @@ -175,7 +175,7 @@

    Arith (Parallel Track)

    - + Vampire 0 22.621202360 @@ -184,7 +184,7 @@

    Arith (Parallel Track)

    - + iProver 0 00.0000560 @@ -204,7 +204,7 @@

    Arith (Parallel Track)

    - + Vampire 0 22.621202909 @@ -213,7 +213,7 @@

    Arith (Parallel Track)

    - + iProver 0 00.000011011 @@ -237,7 +237,6 @@

    Arith (Parallel Track)

    - + - diff --git a/archive/2023/results/arith-proof-exhibition.html b/archive/2023/results/arith-proof-exhibition.html index d6253706..bbb31bc9 100644 --- a/archive/2023/results/arith-proof-exhibition.html +++ b/archive/2023/results/arith-proof-exhibition.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    Arith (Proof Exhibition Track)

    Competition results for the Arith - + division - + in the Proof Exhibition Track.

    @@ -119,7 +119,7 @@

    Arith (Proof Exhibition Track)

    - + cvc5-lfsc 0 1582 @@ -130,7 +130,7 @@

    Arith (Proof Exhibition Track)

    - + cvc5 0 1573 @@ -141,7 +141,7 @@

    Arith (Proof Exhibition Track)

    - + SMTInterpol 0 267 @@ -163,7 +163,7 @@

    Arith (Proof Exhibition Track)

    - + cvc5-lfsc 0 15829514.4949510.471180116 @@ -172,7 +172,7 @@

    Arith (Proof Exhibition Track)

    - + cvc5 0 15739948.6499919.2771270124 @@ -181,7 +181,7 @@

    Arith (Proof Exhibition Track)

    - + SMTInterpol 0 2672000.917927.697308112513 @@ -205,7 +205,6 @@

    Arith (Proof Exhibition Track)

    - + - diff --git a/archive/2023/results/arith-single-query.html b/archive/2023/results/arith-single-query.html index a99e4bc0..41c8644d 100644 --- a/archive/2023/results/arith-single-query.html +++ b/archive/2023/results/arith-single-query.html @@ -35,7 +35,7 @@

    Arith (Single Query Track)

    Competition results for the Arith division in the Single Query Track.

    [formatting-only hunks: page header, navigation, performance summary, and result rows for cvc5, 2021-z3n, YicesQS, UltimateEliminator+MathSAT, iProver Fixedn, SMTInterpol, iProver, and Vampire]

diff --git a/archive/2023/results/arith-unsat-core.html b/archive/2023/results/arith-unsat-core.html
index 9de312c0..df829489 100644
--- a/archive/2023/results/arith-unsat-core.html
+++ b/archive/2023/results/arith-unsat-core.html

    Arith (Unsat Core Track)

    Competition results for the Arith division in the Unsat Core Track.

    [formatting-only hunks: page header, navigation, performance summary, and result rows for cvc5, 2022-cvc5n, SMTInterpol, Vampire, and UltimateEliminator+MathSAT]

diff --git a/archive/2023/results/aufbv-proof-exhibition.html b/archive/2023/results/aufbv-proof-exhibition.html
index f7f89a16..74fe9134 100644
--- a/archive/2023/results/aufbv-proof-exhibition.html
+++ b/archive/2023/results/aufbv-proof-exhibition.html

    AUFBV (Proof Exhibition Track)

    Competition results for the AUFBV logic in the Proof Exhibition Track.

    [formatting-only hunks: page header, navigation, and result rows for cvc5-lfsc and cvc5]

diff --git a/archive/2023/results/aufbv-single-query.html b/archive/2023/results/aufbv-single-query.html
index 1ebb9b5e..859535f3 100644
--- a/archive/2023/results/aufbv-single-query.html
+++ b/archive/2023/results/aufbv-single-query.html

    AUFBV (Single Query Track)

    Competition results for the AUFBV logic in the Single Query Track.

    [formatting-only hunks: page header, navigation, performance summary, and result rows for Bitwuzla, Bitwuzla Fixedn, 2022-z3-4.8.17n, cvc5, UltimateIntBlastingWrapper+SMTInterpol, and UltimateEliminator+MathSAT]

diff --git a/archive/2023/results/aufbvdtlia-proof-exhibition.html b/archive/2023/results/aufbvdtlia-proof-exhibition.html
index 50a0f11c..f1992b97 100644
--- a/archive/2023/results/aufbvdtlia-proof-exhibition.html
+++ b/archive/2023/results/aufbvdtlia-proof-exhibition.html

    AUFBVDTLIA (Proof Exhibition Track)

    Competition results for the AUFBVDTLIA logic in the Proof Exhibition Track.

    [formatting-only hunks: page header, navigation, and result rows for cvc5-lfsc and cvc5]

diff --git a/archive/2023/results/aufbvdtlia-single-query.html b/archive/2023/results/aufbvdtlia-single-query.html
index 3a2aae16..e73f9289 100644
--- a/archive/2023/results/aufbvdtlia-single-query.html
+++ b/archive/2023/results/aufbvdtlia-single-query.html

    AUFBVDTLIA (Single Query Track)

    Competition results for the AUFBVDTLIA logic in the Single Query Track.

    [formatting-only hunks: page header, navigation, performance summary, and result rows for cvc5, 2022-z3-4.8.17n, and UltimateIntBlastingWrapper+SMTInterpol]

diff --git a/archive/2023/results/aufbvdtnira-proof-exhibition.html b/archive/2023/results/aufbvdtnira-proof-exhibition.html
index a6d70722..dddaa7e6 100644
--- a/archive/2023/results/aufbvdtnira-proof-exhibition.html
+++ b/archive/2023/results/aufbvdtnira-proof-exhibition.html

    AUFBVDTNIRA (Proof Exhibition Track)

    Competition results for the AUFBVDTNIRA logic in the Proof Exhibition Track.

    [formatting-only hunks: page header, navigation, and result rows for cvc5-lfsc and cvc5]

diff --git a/archive/2023/results/aufbvfp-proof-exhibition.html b/archive/2023/results/aufbvfp-proof-exhibition.html
index 8cbd85e8..3e730e9d 100644
--- a/archive/2023/results/aufbvfp-proof-exhibition.html
+++ b/archive/2023/results/aufbvfp-proof-exhibition.html

    AUFBVFP (Proof Exhibition Track)

    Competition results for the AUFBVFP logic in the Proof Exhibition Track.

    [formatting-only hunks: page header, navigation, and result rows for cvc5-lfsc and cvc5]

diff --git a/archive/2023/results/aufbvfp-single-query.html b/archive/2023/results/aufbvfp-single-query.html
index 59429503..ef386017 100644
--- a/archive/2023/results/aufbvfp-single-query.html
+++ b/archive/2023/results/aufbvfp-single-query.html

    AUFBVFP (Single Query Track)

    Competition results for the AUFBVFP logic in the Single Query Track.

    [formatting-only hunks: page header, navigation, performance summary, and result rows for Bitwuzla, Bitwuzla Fixedn, cvc5, 2022-z3-4.8.17n, and UltimateEliminator+MathSAT]

diff --git a/archive/2023/results/aufdtlia-proof-exhibition.html b/archive/2023/results/aufdtlia-proof-exhibition.html
index 12bb9190..51d9e86a 100644
--- a/archive/2023/results/aufdtlia-proof-exhibition.html
+++ b/archive/2023/results/aufdtlia-proof-exhibition.html

    AUFDTLIA (Proof Exhibition Track)

    Competition results for the AUFDTLIA logic in the Proof Exhibition Track.

    [formatting-only hunks: page header, navigation, and result rows for SMTInterpol, cvc5-lfsc, and cvc5]

diff --git a/archive/2023/results/aufdtlia-single-query.html b/archive/2023/results/aufdtlia-single-query.html
index 5f7d41e6..3ebb3b29 100644
--- a/archive/2023/results/aufdtlia-single-query.html
+++ b/archive/2023/results/aufdtlia-single-query.html

    AUFDTLIA (Single Query Track)

    Competition results for the AUFDTLIA logic in the Single Query Track.

    [formatting-only hunks: page header, navigation, performance summary, and result rows for 2022-cvc5n, cvc5, SMTInterpol, Vampire, iProver, and iProver Fixedn]

diff --git a/archive/2023/results/aufdtlia-unsat-core.html b/archive/2023/results/aufdtlia-unsat-core.html
index 5eced680..349e593d 100644
--- a/archive/2023/results/aufdtlia-unsat-core.html
+++ b/archive/2023/results/aufdtlia-unsat-core.html

    AUFDTLIA (Unsat Core Track)

    Competition results for the AUFDTLIA logic in the Unsat Core Track.

    [formatting-only hunks: page header, navigation, performance summary, and result rows for 2021-cvc5-ucn, SMTInterpol, Vampire, and cvc5]

diff --git a/archive/2023/results/aufdtlira-cloud.html b/archive/2023/results/aufdtlira-cloud.html
index 4a5d9957..3f80a937 100644
--- a/archive/2023/results/aufdtlira-cloud.html
+++ b/archive/2023/results/aufdtlira-cloud.html

    AUFDTLIRA (Cloud Track)

    Competition results for the AUFDTLIRA logic in the Cloud Track.

    [formatting-only hunks: page header, navigation, and result rows for cvc5 and Vampire]

diff --git a/archive/2023/results/aufdtlira-parallel.html b/archive/2023/results/aufdtlira-parallel.html
index 1958e91c..ed775e04 100644
--- a/archive/2023/results/aufdtlira-parallel.html
+++ b/archive/2023/results/aufdtlira-parallel.html

    AUFDTLIRA (Parallel Track)

    Competition results for the AUFDTLIRA logic in the Parallel Track.

    [formatting-only hunks: page header, navigation, and result rows for iProver and Vampire]

diff --git a/archive/2023/results/aufdtlira-proof-exhibition.html b/archive/2023/results/aufdtlira-proof-exhibition.html
index 25c71988..dc11264e 100644
--- a/archive/2023/results/aufdtlira-proof-exhibition.html
+++ b/archive/2023/results/aufdtlira-proof-exhibition.html

    AUFDTLIRA (Proof Exhibition Track)

    Competition results for the AUFDTLIRA logic in the Proof Exhibition Track.

    [formatting-only hunks: page header, navigation, and result rows for cvc5-lfsc, cvc5, and SMTInterpol]

diff --git a/archive/2023/results/aufdtlira-single-query.html b/archive/2023/results/aufdtlira-single-query.html
index 07c3d539..61d4eea8 100644
--- a/archive/2023/results/aufdtlira-single-query.html
+++ b/archive/2023/results/aufdtlira-single-query.html

    AUFDTLIRA (Single Query Track)

    Competition results for the AUFDTLIRA logic in the Single Query Track.

    [formatting-only hunks: page header, navigation, performance summary, and result rows for 2022-cvc5n, cvc5, Vampire, SMTInterpol, iProver, and iProver Fixedn]

diff --git a/archive/2023/results/aufdtlira-unsat-core.html b/archive/2023/results/aufdtlira-unsat-core.html
index 6527a9c3..ab8b9d30 100644
--- a/archive/2023/results/aufdtlira-unsat-core.html
+++ b/archive/2023/results/aufdtlira-unsat-core.html

    AUFDTLIRA (Unsat Core Track)

    Competition results for the AUFDTLIRA logic in the Unsat Core Track.

    [formatting-only hunks: page header, navigation, performance summary, and result rows for 2021-cvc5-ucn, cvc5, SMTInterpol, and Vampire]

diff --git a/archive/2023/results/aufdtnira-cloud.html b/archive/2023/results/aufdtnira-cloud.html
index 85b623bc..e11e8173 100644
--- a/archive/2023/results/aufdtnira-cloud.html
+++ b/archive/2023/results/aufdtnira-cloud.html

    AUFDTNIRA (Cloud Track)

    Competition results for the AUFDTNIRA logic in the Cloud Track.

    [formatting-only hunks: page header, navigation, and result rows for Vampire and cvc5]

diff --git a/archive/2023/results/aufdtnira-parallel.html b/archive/2023/results/aufdtnira-parallel.html
index 642bc3a3..12521ea8 100644
--- a/archive/2023/results/aufdtnira-parallel.html
+++ b/archive/2023/results/aufdtnira-parallel.html

    AUFDTNIRA (Parallel Track)

    Competition results for the AUFDTNIRA logic in the Parallel Track.

    [formatting-only hunks: page header, navigation, and result rows for Vampire and iProver]

diff --git a/archive/2023/results/aufdtnira-proof-exhibition.html b/archive/2023/results/aufdtnira-proof-exhibition.html
index 9626d3b1..1f71a185 100644
--- a/archive/2023/results/aufdtnira-proof-exhibition.html
+++ b/archive/2023/results/aufdtnira-proof-exhibition.html

    AUFDTNIRA (Proof Exhibition Track)

    Competition results for the AUFDTNIRA logic in the Proof Exhibition Track.

    [formatting-only hunks: page header, navigation, and result rows for cvc5-lfsc and cvc5]

diff --git a/archive/2023/results/aufdtnira-single-query.html b/archive/2023/results/aufdtnira-single-query.html
index e64a74b0..6c336327 100644
--- a/archive/2023/results/aufdtnira-single-query.html
+++ b/archive/2023/results/aufdtnira-single-query.html

    AUFDTNIRA (Single Query Track)

    Competition results for the AUFDTNIRA logic in the Single Query Track.

    [formatting-only hunks: page header, navigation, performance summary, and result rows for 2022-cvc5n, cvc5, Vampire, iProver, and iProver Fixedn]

diff --git a/archive/2023/results/aufdtnira-unsat-core.html b/archive/2023/results/aufdtnira-unsat-core.html
index ac4b5a96..a66dba22 100644
--- a/archive/2023/results/aufdtnira-unsat-core.html
+++ b/archive/2023/results/aufdtnira-unsat-core.html

    AUFDTNIRA (Unsat Core Track)

    Competition results for the AUFDTNIRA logic in the Unsat Core Track.

    [formatting-only hunks: page header, navigation, performance summary, and result rows for 2020-CVC4-ucn, cvc5, and Vampire]

diff --git a/archive/2023/results/auffpdtnira-proof-exhibition.html b/archive/2023/results/auffpdtnira-proof-exhibition.html
index f1188559..368b1fdc 100644
--- a/archive/2023/results/auffpdtnira-proof-exhibition.html
+++ b/archive/2023/results/auffpdtnira-proof-exhibition.html

    AUFFPDTNIRA (Proof Exhibition Track)

    Competition results for the AUFFPDTNIRA logic in the Proof Exhibition Track.

    [formatting-only hunks: page header, navigation, and result rows for cvc5 and cvc5-lfsc]

diff --git a/archive/2023/results/auflia-cloud.html b/archive/2023/results/auflia-cloud.html
index b06fa303..4c6953e9 100644
--- a/archive/2023/results/auflia-cloud.html
+++ b/archive/2023/results/auflia-cloud.html

    AUFLIA (Cloud Track)

    Competition results for the AUFLIA logic in the Cloud Track.

    [formatting-only hunks: page header, navigation, and result rows for Vampire and cvc5]

diff --git a/archive/2023/results/auflia-parallel.html b/archive/2023/results/auflia-parallel.html
index 8fb9ed14..6e697f61 100644
--- a/archive/2023/results/auflia-parallel.html
+++ b/archive/2023/results/auflia-parallel.html

    AUFLIA (Parallel Track)

    Competition results for the AUFLIA logic in the Parallel Track.

    [formatting-only hunks: page header, navigation, and result rows for Vampire and iProver]

diff --git a/archive/2023/results/auflia-proof-exhibition.html b/archive/2023/results/auflia-proof-exhibition.html
index 9d1792d8..49b66939 100644
--- a/archive/2023/results/auflia-proof-exhibition.html
+++ b/archive/2023/results/auflia-proof-exhibition.html

    AUFLIA (Proof Exhibition Track)

    Competition results for the AUFLIA logic in the Proof Exhibition Track.

    [formatting-only hunks: page header, navigation, and result rows for cvc5-lfsc, cvc5, and SMTInterpol]

diff --git a/archive/2023/results/auflia-single-query.html b/archive/2023/results/auflia-single-query.html
index fe08799e..b4904726 100644
--- a/archive/2023/results/auflia-single-query.html
+++ b/archive/2023/results/auflia-single-query.html

    AUFLIA (Single Query Track)

    Competition results for the AUFLIA logic in the Single Query Track.

    [formatting-only hunks: page header, navigation, performance summary, and result rows for 2022-cvc5n, cvc5, Vampire, iProver, iProver Fixedn, SMTInterpol, and UltimateEliminator+MathSAT]

diff --git a/archive/2023/results/auflia-unsat-core.html b/archive/2023/results/auflia-unsat-core.html
index 3d236f76..95d3d921 100644
--- a/archive/2023/results/auflia-unsat-core.html
+++ b/archive/2023/results/auflia-unsat-core.html

[one-line hunks throughout the AUFLIA (Unsat Core Track) results page]

diff --git a/archive/2023/results/auflira-cloud.html b/archive/2023/results/auflira-cloud.html
index b466c9c8..8c6eb17d 100644
--- a/archive/2023/results/auflira-cloud.html
+++ b/archive/2023/results/auflira-cloud.html

[one-line hunks throughout the AUFLIRA (Cloud Track) results page]

diff --git a/archive/2023/results/auflira-parallel.html b/archive/2023/results/auflira-parallel.html
index 04b5ac4c..bb069ccb 100644
--- a/archive/2023/results/auflira-parallel.html
+++ b/archive/2023/results/auflira-parallel.html

[one-line hunks throughout the AUFLIRA (Parallel Track) results page]

diff --git a/archive/2023/results/auflira-proof-exhibition.html b/archive/2023/results/auflira-proof-exhibition.html
index 17ba87a5..707623f5 100644
--- a/archive/2023/results/auflira-proof-exhibition.html
+++ b/archive/2023/results/auflira-proof-exhibition.html

[one-line hunks throughout the AUFLIRA (Proof Exhibition Track) results page]

diff --git a/archive/2023/results/auflira-single-query.html b/archive/2023/results/auflira-single-query.html
index 034aab5f..2d19690e 100644
--- a/archive/2023/results/auflira-single-query.html
+++ b/archive/2023/results/auflira-single-query.html

[one-line hunks throughout the AUFLIRA (Single Query Track) results page]

diff --git a/archive/2023/results/auflira-unsat-core.html b/archive/2023/results/auflira-unsat-core.html
index cdad0d34..9de87ede 100644
--- a/archive/2023/results/auflira-unsat-core.html
+++ b/archive/2023/results/auflira-unsat-core.html

[one-line hunks throughout the AUFLIRA (Unsat Core Track) results page]

diff --git a/archive/2023/results/aufnia-proof-exhibition.html b/archive/2023/results/aufnia-proof-exhibition.html
index 0c84241a..531ed384 100644
--- a/archive/2023/results/aufnia-proof-exhibition.html
+++ b/archive/2023/results/aufnia-proof-exhibition.html

[one-line hunks throughout the AUFNIA (Proof Exhibition Track) results page]

diff --git a/archive/2023/results/aufnia-single-query.html b/archive/2023/results/aufnia-single-query.html
index cf8decaf..32a69a52 100644
--- a/archive/2023/results/aufnia-single-query.html
+++ b/archive/2023/results/aufnia-single-query.html

[one-line hunks throughout the AUFNIA (Single Query Track) results page]

diff --git a/archive/2023/results/aufnia-unsat-core.html b/archive/2023/results/aufnia-unsat-core.html
index e8d26a0d..69910d20 100644
--- a/archive/2023/results/aufnia-unsat-core.html
+++ b/archive/2023/results/aufnia-unsat-core.html

[one-line hunks throughout the AUFNIA (Unsat Core Track) results page]

diff --git a/archive/2023/results/aufnira-cloud.html b/archive/2023/results/aufnira-cloud.html
index 3b12dfcd..9ecb432d 100644
--- a/archive/2023/results/aufnira-cloud.html
+++ b/archive/2023/results/aufnira-cloud.html

[one-line hunks throughout the AUFNIRA (Cloud Track) results page]

diff --git a/archive/2023/results/aufnira-incremental.html b/archive/2023/results/aufnira-incremental.html
index 01696961..4b033d02 100644
--- a/archive/2023/results/aufnira-incremental.html
+++ b/archive/2023/results/aufnira-incremental.html

[one-line hunks throughout the AUFNIRA (Incremental Track) results page]

diff --git a/archive/2023/results/aufnira-parallel.html b/archive/2023/results/aufnira-parallel.html
index 180d92bf..ca4ecf9b 100644
--- a/archive/2023/results/aufnira-parallel.html
+++ b/archive/2023/results/aufnira-parallel.html

[one-line hunks throughout the AUFNIRA (Parallel Track) results page]

diff --git a/archive/2023/results/aufnira-proof-exhibition.html b/archive/2023/results/aufnira-proof-exhibition.html
index 07c277f4..71e8a678 100644
--- a/archive/2023/results/aufnira-proof-exhibition.html
+++ b/archive/2023/results/aufnira-proof-exhibition.html

[one-line hunks throughout the AUFNIRA (Proof Exhibition Track) results page]

diff --git a/archive/2023/results/aufnira-single-query.html b/archive/2023/results/aufnira-single-query.html
index c00a226b..7f47ebe9 100644
--- a/archive/2023/results/aufnira-single-query.html
+++ b/archive/2023/results/aufnira-single-query.html

[one-line hunks throughout the AUFNIRA (Single Query Track) results page]

diff --git a/archive/2023/results/aufnira-unsat-core.html b/archive/2023/results/aufnira-unsat-core.html
index 24580c48..b380b347 100644
--- a/archive/2023/results/aufnira-unsat-core.html
+++ b/archive/2023/results/aufnira-unsat-core.html

[one-line hunks throughout the AUFNIRA (Unsat Core Track) results page]

diff --git a/archive/2023/results/biggest-lead-cloud.html b/archive/2023/results/biggest-lead-cloud.html
index 8e334e24..7dcd8d7a 100644
--- a/archive/2023/results/biggest-lead-cloud.html
+++ b/archive/2023/results/biggest-lead-cloud.html

[one-line hunks throughout the Biggest Lead (Cloud Track) page]

diff --git a/archive/2023/results/biggest-lead-incremental.html b/archive/2023/results/biggest-lead-incremental.html
index 3be5794d..76a58434 100644
--- a/archive/2023/results/biggest-lead-incremental.html
+++ b/archive/2023/results/biggest-lead-incremental.html

[one-line hunks throughout the Biggest Lead (Incremental Track) page]

diff --git a/archive/2023/results/biggest-lead-model-validation.html b/archive/2023/results/biggest-lead-model-validation.html
index d1b4fbc1..f9503faa 100644
--- a/archive/2023/results/biggest-lead-model-validation.html
+++ b/archive/2023/results/biggest-lead-model-validation.html

[one-line hunks throughout the Biggest Lead (Model Validation Track) page]

diff --git a/archive/2023/results/biggest-lead-parallel.html b/archive/2023/results/biggest-lead-parallel.html
index 2d82a189..ea493956 100644
--- a/archive/2023/results/biggest-lead-parallel.html
+++ b/archive/2023/results/biggest-lead-parallel.html

[one-line hunks throughout the Biggest Lead (Parallel Track) page]

diff --git a/archive/2023/results/biggest-lead-single-query.html b/archive/2023/results/biggest-lead-single-query.html
index db72cbff..0d7418c1 100644
--- a/archive/2023/results/biggest-lead-single-query.html
+++ b/archive/2023/results/biggest-lead-single-query.html

[one-line hunks throughout the Biggest Lead (Single Query Track) page]

diff --git a/archive/2023/results/biggest-lead-unsat-core.html b/archive/2023/results/biggest-lead-unsat-core.html
index e6df32a0..ae20526d 100644
--- a/archive/2023/results/biggest-lead-unsat-core.html
+++ b/archive/2023/results/biggest-lead-unsat-core.html

[one-line hunks throughout the Biggest Lead (Unsat Core Track) page]

diff --git a/archive/2023/results/bitvec-incremental.html b/archive/2023/results/bitvec-incremental.html
index 9d8d86ab..32c9a45b 100644
--- a/archive/2023/results/bitvec-incremental.html
+++ b/archive/2023/results/bitvec-incremental.html

[one-line hunks throughout the Bitvec (Incremental Track) results page]

diff --git a/archive/2023/results/bitvec-proof-exhibition.html b/archive/2023/results/bitvec-proof-exhibition.html
index 8dfcec9e..ed708243 100644
--- a/archive/2023/results/bitvec-proof-exhibition.html
+++ b/archive/2023/results/bitvec-proof-exhibition.html

[one-line hunks throughout the Bitvec (Proof Exhibition Track) results page]

diff --git a/archive/2023/results/bitvec-single-query.html b/archive/2023/results/bitvec-single-query.html
index ac15249b..2a7514a8 100644
--- a/archive/2023/results/bitvec-single-query.html
+++ b/archive/2023/results/bitvec-single-query.html

[one-line hunks throughout the Bitvec (Single Query Track) results page]

diff --git a/archive/2023/results/bitvec-unsat-core.html b/archive/2023/results/bitvec-unsat-core.html
index 5b8fd06b..1dee12bd 100644
--- a/archive/2023/results/bitvec-unsat-core.html
+++ b/archive/2023/results/bitvec-unsat-core.html

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    Bitvec (Unsat Core Track)

    Competition results for the Bitvec - + division - + in the Unsat Core Track.

    @@ -104,8 +104,8 @@

    Bitvec (Unsat Core Track)

    Sequential PerformanceParallel Performance cvc5cvc5 - - + + @@ -126,7 +126,7 @@

    Bitvec (Unsat Core Track)

    - + 2020-CVC4-ucn 0 91 @@ -137,7 +137,7 @@

    Bitvec (Unsat Core Track)

    - + cvc5 0 81 @@ -148,7 +148,7 @@

    Bitvec (Unsat Core Track)

    - + Bitwuzla 0 36 @@ -159,7 +159,7 @@

    Bitvec (Unsat Core Track)

    - + Bitwuzla Fixedn 0 36 @@ -170,7 +170,7 @@

    Bitvec (Unsat Core Track)

    - + UltimateEliminator+MathSAT 0 0 @@ -192,7 +192,7 @@

    Bitvec (Unsat Core Track)

    - + 2020-CVC4-ucn 0 91958.526954.33688 @@ -201,7 +201,7 @@

    Bitvec (Unsat Core Track)

    - + cvc5 0 811167.7091166.67288 @@ -210,7 +210,7 @@

    Bitvec (Unsat Core Track)

    - + Bitwuzla 0 363295.3933296.124150 @@ -219,7 +219,7 @@

    Bitvec (Unsat Core Track)

    - + Bitwuzla Fixedn 0 363466.6063461.943150 @@ -228,7 +228,7 @@

    Bitvec (Unsat Core Track)

    - + UltimateEliminator+MathSAT 0 06.723.4080 @@ -252,7 +252,6 @@

    Bitvec (Unsat Core Track)

diff --git a/archive/2023/results/bv-incremental.html b/archive/2023/results/bv-incremental.html
index 9ab21676..97e9045a 100644
--- a/archive/2023/results/bv-incremental.html
+++ b/archive/2023/results/bv-incremental.html
[Markup-only hunks: results page for the BV logic in the Incremental Track; score rows for 2019-Z3ⁿ, cvc5, Bitwuzla, and UltimateEliminator+MathSAT; parallel-performance winner cvc5.]
diff --git a/archive/2023/results/bv-proof-exhibition.html b/archive/2023/results/bv-proof-exhibition.html
index a9cd2068..fbf7ba8d 100644
--- a/archive/2023/results/bv-proof-exhibition.html
+++ b/archive/2023/results/bv-proof-exhibition.html
[Markup-only hunks: results page for the BV logic in the Proof Exhibition Track; score rows for cvc5-lfsc and cvc5.]
diff --git a/archive/2023/results/bv-single-query.html b/archive/2023/results/bv-single-query.html
index 86909faa..fd28ee48 100644
--- a/archive/2023/results/bv-single-query.html
+++ b/archive/2023/results/bv-single-query.html
[Markup-only hunks: results page for the BV logic in the Single Query Track; score rows for 2019-Par4ⁿ, cvc5, Bitwuzla, Bitwuzla Fixedⁿ, YicesQS, UltimateEliminator+MathSAT, UltimateIntBlastingWrapper+SMTInterpol, and Q3B; winners cvc5 (sequential, parallel, UNSAT) and Bitwuzla (SAT, 24-second).]
diff --git a/archive/2023/results/bv-unsat-core.html b/archive/2023/results/bv-unsat-core.html
index e4139d86..8b753bd9 100644
--- a/archive/2023/results/bv-unsat-core.html
+++ b/archive/2023/results/bv-unsat-core.html
[Markup-only hunks: results page for the BV logic in the Unsat Core Track; score rows for 2020-CVC4-ucⁿ, cvc5, Bitwuzla, Bitwuzla Fixedⁿ, and UltimateEliminator+MathSAT; sequential and parallel winner cvc5.]
diff --git a/archive/2023/results/bvfp-incremental.html b/archive/2023/results/bvfp-incremental.html
index 58e4d27a..4b58e484 100644
--- a/archive/2023/results/bvfp-incremental.html
+++ b/archive/2023/results/bvfp-incremental.html
[Markup-only hunks: results page for the BVFP logic in the Incremental Track; score rows for Bitwuzla, 2022-Bitwuzlaⁿ, cvc5, and UltimateEliminator+MathSAT; parallel-performance winner Bitwuzla.]
diff --git a/archive/2023/results/bvfp-proof-exhibition.html b/archive/2023/results/bvfp-proof-exhibition.html
index f98a1fd5..44249912 100644
--- a/archive/2023/results/bvfp-proof-exhibition.html
+++ b/archive/2023/results/bvfp-proof-exhibition.html
[Markup-only hunks: results page for the BVFP logic in the Proof Exhibition Track; score rows for cvc5 and cvc5-lfsc, both with zero solved instances.]
diff --git a/archive/2023/results/bvfp-single-query.html b/archive/2023/results/bvfp-single-query.html
index 112a4381..111f6fc6 100644
--- a/archive/2023/results/bvfp-single-query.html
+++ b/archive/2023/results/bvfp-single-query.html
[Markup-only hunks: results page for the BVFP logic in the Single Query Track; score rows for Bitwuzla, Bitwuzla Fixedⁿ, cvc5, 2022-Bitwuzlaⁿ, and UltimateEliminator+MathSAT; winner Bitwuzla in all five performance categories.]
diff --git a/archive/2023/results/bvfp-unsat-core.html b/archive/2023/results/bvfp-unsat-core.html
index a33459cf..b8c26465 100644
--- a/archive/2023/results/bvfp-unsat-core.html
+++ b/archive/2023/results/bvfp-unsat-core.html
[Markup-only hunks: results page for the BVFP logic in the Unsat Core Track; score rows for 2020-CVC4-ucⁿ, cvc5, UltimateEliminator+MathSAT, Bitwuzla, and Bitwuzla Fixedⁿ, all at zero; no winner declared.]
diff --git a/archive/2023/results/bvfplra-incremental.html b/archive/2023/results/bvfplra-incremental.html
index 89d63592..0ccbf03b 100644
--- a/archive/2023/results/bvfplra-incremental.html
+++ b/archive/2023/results/bvfplra-incremental.html
[Markup-only hunks: results page for the BVFPLRA logic in the Incremental Track; score rows for Bitwuzla, 2022-Bitwuzlaⁿ, cvc5, and UltimateEliminator+MathSAT; parallel-performance winner Bitwuzla.]
diff --git a/archive/2023/results/bvfplra-proof-exhibition.html b/archive/2023/results/bvfplra-proof-exhibition.html
index e422203b..829c9dbe 100644
--- a/archive/2023/results/bvfplra-proof-exhibition.html
+++ b/archive/2023/results/bvfplra-proof-exhibition.html
[Markup-only hunks: results page for the BVFPLRA logic in the Proof Exhibition Track; score rows for cvc5-lfsc and cvc5.]
diff --git a/archive/2023/results/bvfplra-single-query.html b/archive/2023/results/bvfplra-single-query.html
index fa57c761..cda12fef 100644
--- a/archive/2023/results/bvfplra-single-query.html
+++ b/archive/2023/results/bvfplra-single-query.html
[Markup-only hunks: results page for the BVFPLRA logic in the Single Query Track; score rows for Bitwuzla, Bitwuzla Fixedⁿ, 2022-Bitwuzlaⁿ, cvc5, and UltimateEliminator+MathSAT; winner Bitwuzla in all five performance categories.]
diff --git a/archive/2023/results/bvfplra-unsat-core.html b/archive/2023/results/bvfplra-unsat-core.html
index 7b901fdb..d0f1ed1e 100644
--- a/archive/2023/results/bvfplra-unsat-core.html
+++ b/archive/2023/results/bvfplra-unsat-core.html
[Markup-only hunks: results page for the BVFPLRA logic in the Unsat Core Track; score rows for Bitwuzla Fixedⁿ, Bitwuzla, 2020-CVC4-ucⁿ, cvc5, and UltimateEliminator+MathSAT; sequential and parallel winner Bitwuzla.]
diff --git a/archive/2023/results/equality-cloud.html b/archive/2023/results/equality-cloud.html
index 6ebf7d61..b3cd7268 100644
--- a/archive/2023/results/equality-cloud.html
+++ b/archive/2023/results/equality-cloud.html
[Markup-only hunks: results page for the Equality division in the Cloud Track; score rows for cvc5 and Vampire.]
diff --git a/archive/2023/results/equality-incremental.html b/archive/2023/results/equality-incremental.html
index fa01ff28..daebdaed 100644
--- a/archive/2023/results/equality-incremental.html
+++ b/archive/2023/results/equality-incremental.html
[Markup-only hunks: results page for the Equality division in the Incremental Track; score rows for 2020-z3ⁿ, cvc5, SMTInterpol, UltimateEliminator+MathSAT, Yices2, and Yices2 Fixedⁿ; parallel-performance winner cvc5.]
diff --git a/archive/2023/results/equality-lineararith-cloud.html b/archive/2023/results/equality-lineararith-cloud.html
index dca38f8b..fd062f1a 100644
--- a/archive/2023/results/equality-lineararith-cloud.html
+++ b/archive/2023/results/equality-lineararith-cloud.html
[Markup-only hunks: results page for the Equality+LinearArith division in the Cloud Track; score rows for cvc5 and Vampire.]
diff --git a/archive/2023/results/equality-lineararith-incremental.html b/archive/2023/results/equality-lineararith-incremental.html
index f8f90167..1c83e7bb 100644
--- a/archive/2023/results/equality-lineararith-incremental.html
+++ b/archive/2023/results/equality-lineararith-incremental.html
[Markup-only hunks: results page for the Equality+LinearArith division in the Incremental Track; score rows for 2021-z3ⁿ, cvc5, SMTInterpol, and UltimateEliminator+MathSAT; parallel-performance winner cvc5.]
diff --git a/archive/2023/results/equality-lineararith-parallel.html b/archive/2023/results/equality-lineararith-parallel.html
index 3964d193..efb0fcf2 100644
--- a/archive/2023/results/equality-lineararith-parallel.html
+++ b/archive/2023/results/equality-lineararith-parallel.html
[Markup-only hunks: results page for the Equality+LinearArith division in the Parallel Track; score rows for iProver and Vampire.]
diff --git a/archive/2023/results/equality-lineararith-proof-exhibition.html b/archive/2023/results/equality-lineararith-proof-exhibition.html
index 7174c57b..2e1deeb9 100644
--- a/archive/2023/results/equality-lineararith-proof-exhibition.html
+++ b/archive/2023/results/equality-lineararith-proof-exhibition.html
[Markup-only hunks: results page for the Equality+LinearArith division in the Proof Exhibition Track; score rows for cvc5-lfsc, cvc5, and SMTInterpol.]
diff --git a/archive/2023/results/equality-lineararith-single-query.html b/archive/2023/results/equality-lineararith-single-query.html
index d2712475..19e0de0a 100644
--- a/archive/2023/results/equality-lineararith-single-query.html
+++ b/archive/2023/results/equality-lineararith-single-query.html
[Markup-only hunks: results page for the Equality+LinearArith division in the Single Query Track; score rows for 2022-cvc5ⁿ, cvc5, Vampire, SMTInterpol, iProver, iProver Fixedⁿ, and UltimateEliminator+MathSAT; winner cvc5 in all five performance categories.]
diff --git a/archive/2023/results/equality-lineararith-unsat-core.html b/archive/2023/results/equality-lineararith-unsat-core.html
index 595eebeb..5ac6c491 100644
--- a/archive/2023/results/equality-lineararith-unsat-core.html
+++ b/archive/2023/results/equality-lineararith-unsat-core.html
[Markup-only hunks: results page for the Equality+LinearArith division in the Unsat Core Track; score rows for 2021-cvc5-ucⁿ, cvc5, SMTInterpol, Vampire, and UltimateEliminator+MathSAT; sequential and parallel winner cvc5.]
diff --git a/archive/2023/results/equality-machinearith-incremental.html b/archive/2023/results/equality-machinearith-incremental.html
index f4581aff..0f2c13ff 100644
--- a/archive/2023/results/equality-machinearith-incremental.html
+++ b/archive/2023/results/equality-machinearith-incremental.html
[Markup-only hunks: results page for the Equality+MachineArith division in the Incremental Track; score rows for Bitwuzla, cvc5, 2022-UltimateEliminator+MathSATⁿ, and UltimateEliminator+MathSAT; parallel-performance winner Bitwuzla.]
diff --git a/archive/2023/results/equality-machinearith-proof-exhibition.html b/archive/2023/results/equality-machinearith-proof-exhibition.html
index 69eca905..195a7c0e 100644
--- a/archive/2023/results/equality-machinearith-proof-exhibition.html
+++ b/archive/2023/results/equality-machinearith-proof-exhibition.html
[Markup-only hunks: results page for the Equality+MachineArith division in the Proof Exhibition Track; score rows for cvc5-lfsc and cvc5.]
diff --git a/archive/2023/results/equality-machinearith-single-query.html b/archive/2023/results/equality-machinearith-single-query.html
index 0cda12e2..ef4906ef 100644
--- a/archive/2023/results/equality-machinearith-single-query.html
+++ b/archive/2023/results/equality-machinearith-single-query.html
[Markup-only hunks: results page for the Equality+MachineArith division in the Single Query Track; score rows for cvc5, 2022-z3-4.8.17ⁿ, Bitwuzla Fixedⁿ, Bitwuzla, UltimateEliminator+MathSAT, and UltimateIntBlastingWrapper+SMTInterpol; winner cvc5 in all five performance categories.]
diff --git a/archive/2023/results/equality-machinearith-unsat-core.html b/archive/2023/results/equality-machinearith-unsat-core.html
index ffd0c4f7..c5ffa7fd 100644
--- a/archive/2023/results/equality-machinearith-unsat-core.html
+++ b/archive/2023/results/equality-machinearith-unsat-core.html
[Markup-only hunks: results page for the Equality+MachineArith division in the Unsat Core Track; score rows for cvc5, 2022-z3-4.8.17ⁿ, Bitwuzla Fixedⁿ, Bitwuzla, and UltimateEliminator+MathSAT; sequential and parallel winner cvc5.]
diff --git a/archive/2023/results/equality-nonlineararith-cloud.html b/archive/2023/results/equality-nonlineararith-cloud.html
index 14abe65e..0f4b8652 100644
--- a/archive/2023/results/equality-nonlineararith-cloud.html
+++ b/archive/2023/results/equality-nonlineararith-cloud.html
[Markup-only hunks: results page for the Equality+NonLinearArith division in the Cloud Track; score rows for Vampire and cvc5.]
diff --git a/archive/2023/results/equality-nonlineararith-incremental.html b/archive/2023/results/equality-nonlineararith-incremental.html
index 23fdf01d..9cfea5a3 100644
--- a/archive/2023/results/equality-nonlineararith-incremental.html
+++ b/archive/2023/results/equality-nonlineararith-incremental.html
[Markup-only hunks: results page for the Equality+NonLinearArith division in the Incremental Track; score rows for 2022-z3-4.8.17ⁿ, cvc5, SMTInterpol, and UltimateEliminator+MathSAT; parallel-performance winner cvc5.]
diff --git a/archive/2023/results/equality-nonlineararith-parallel.html b/archive/2023/results/equality-nonlineararith-parallel.html
index 4a6e131d..108f3dd2 100644
--- a/archive/2023/results/equality-nonlineararith-parallel.html
+++ b/archive/2023/results/equality-nonlineararith-parallel.html
[Markup-only hunks: results page for the Equality+NonLinearArith division in the Parallel Track; score rows for Vampire and iProver.]
diff --git a/archive/2023/results/equality-nonlineararith-proof-exhibition.html b/archive/2023/results/equality-nonlineararith-proof-exhibition.html
index d8b217e1..989874c6 100644
--- a/archive/2023/results/equality-nonlineararith-proof-exhibition.html
+++ b/archive/2023/results/equality-nonlineararith-proof-exhibition.html
[Markup-only hunks: results page for the Equality+NonLinearArith division in the Proof Exhibition Track; score rows for cvc5-lfsc and cvc5.]
    - + - diff --git a/archive/2023/results/equality-nonlineararith-single-query.html b/archive/2023/results/equality-nonlineararith-single-query.html index 7bc82b35..3336ecd2 100644 --- a/archive/2023/results/equality-nonlineararith-single-query.html +++ b/archive/2023/results/equality-nonlineararith-single-query.html @@ -35,7 +35,7 @@


    Equality+NonLinearArith (Single Query Track)

    Competition results for the Equality+NonLinearArith division in the Single Query Track.

    Sequential Performance: cvc5
    Parallel Performance: cvc5
    SAT Performance (parallel): cvc5
    UNSAT Performance (parallel): cvc5
    24s Performance (parallel): cvc5

    cvc5 0 6256
    2022-cvc5n 0 6219
    Vampire 0 4889
    iProver 0 3620
    iProver Fixedn 0 3591
    UltimateEliminator+MathSAT 0 608

    cvc5 0 6256127760.327131004.52762567455511329503137
    2022-cvc5n 0 6219142048.316144583.562621974154783276563166
    Vampire 0 52391252881.755315641.715523905239431204254
    iProver 0 3856662175.104170035.179385603856569505695
    iProver Fixedn 0 3828610036.105156538.001382803828572305723
    UltimateEliminator+MathSAT 0 60811637.28810430.45460842618260302913531

    cvc5 0 74523882.18123928.2627457450787993137
    2022-cvc5n 0 74129139.89929266.6397417410888023166
    UltimateEliminator+MathSAT 0 42610386.5669536.19342642603268799531
    Vampire 0 00.00.000075287994254
    iProver 0 00.00.000075287995695
    iProver Fixedn 0 00.00.000075287995723

    cvc5 0 5511103878.146107076.26555110551133237083137
    2022-cvc5n 0 5478112908.417115316.92454780547836437093166
    Vampire 0 52391252881.755315641.71552390523960437084254
    iProver 0 3856662175.104170035.179385603856198737085695
    iProver Fixedn 0 3828610036.105156538.001382803828201537085723
    UltimateEliminator+MathSAT 0 1821250.723894.261182018230636306531

    cvc5 0 55973046.0153006.58855976914906395403804
    2022-cvc5n 0 55243859.533801.6552467648483971563868
    Vampire 0 350824421.96507.436350803508604305987
    iProver Fixedn 0 315925007.1637676.112315903159639206392
    iProver 0 315625530.0517772.705315603156639506395
    UltimateEliminator+MathSAT 0 5694118.0923003.9856938918060692913639

diff --git a/archive/2023/results/equality-nonlineararith-unsat-core.html b/archive/2023/results/equality-nonlineararith-unsat-core.html
index cc6c2ac7..93f928c8 100644
--- a/archive/2023/results/equality-nonlineararith-unsat-core.html
+++ b/archive/2023/results/equality-nonlineararith-unsat-core.html


    Equality+NonLinearArith (Unsat Core Track)

    Competition results for the Equality+NonLinearArith division in the Unsat Core Track.

    Sequential Performance: cvc5
    Parallel Performance: cvc5

    2020-CVC4-ucn 0 218759
    cvc5 0 218160
    Vampire 0 58059
    UltimateEliminator+MathSAT 1 496

    2020-CVC4-ucn 0 21875947036.18646883.66791
    cvc5 0 21816087785.91287746.161679
    Vampire 0 586086861.8571784.963598
    UltimateEliminator+MathSAT 1 4964462.2213401.95212

diff --git a/archive/2023/results/equality-parallel.html b/archive/2023/results/equality-parallel.html
index fff3b9b2..1f7d3f32 100644
--- a/archive/2023/results/equality-parallel.html
+++ b/archive/2023/results/equality-parallel.html


    Equality (Parallel Track)

    Competition results for the Equality division in the Parallel Track.

    iProver 0 6390.65861510300
    Vampire 3 444458.1354420246500
    Vampire 0 201924.343202005840
    iProver 0 1194.44411024840
    iProver 0 5196.21450520840
    Vampire 3 242533.792240241840
    iProver 0 111.1831011080108
    Vampire 2 1478.7631431195093

diff --git a/archive/2023/results/equality-proof-exhibition.html b/archive/2023/results/equality-proof-exhibition.html
index a3232cc4..9e678a17 100644
--- a/archive/2023/results/equality-proof-exhibition.html
+++ b/archive/2023/results/equality-proof-exhibition.html


    Equality (Proof Exhibition Track)

    Competition results for the Equality division in the Proof Exhibition Track.

    cvc5-lfsc 0 1411
    cvc5 0 1390
    SMTInterpol 0 817

    cvc5-lfsc 0 141111341.00311328.0524690461
    cvc5 0 139011607.47611570.9624900474
    SMTInterpol 0 81752037.35437327.912106301018

diff --git a/archive/2023/results/equality-single-query.html b/archive/2023/results/equality-single-query.html
index a0389b1c..fa77e5c9 100644
--- a/archive/2023/results/equality-single-query.html
+++ b/archive/2023/results/equality-single-query.html


    Equality (Single Query Track)

    Competition results for the Equality division in the Single Query Track.

    Sequential Performance: cvc5
    Parallel Performance: cvc5
    SAT Performance (parallel): cvc5
    UNSAT Performance (parallel): cvc5
    24s Performance (parallel): Vampire

    cvc5 0 1735
    2022-cvc5n 0 1721
    iProver Fixedn 0 914
    iProver 0 869
    Yices2 0 343
    SMTInterpol 0 318
    UltimateEliminator+MathSAT 0 0
    Vampire 79 1674

    cvc5 0 1735298080.055310223.55717355291206267202672
    2022-cvc5n 0 1721295570.73302864.92817215281193268602686
    Yices2 0 34311422.42711419.2534330313251415502514
    SMTInterpol 0 32441536.16429541.3732413311408303919
    UltimateEliminator+MathSAT 0 00.00.0000285715500
    iProver Fixedn 9 1202513330.566130769.2131202221981320503170
    iProver 9 982275241.88270205.4329820982342503375
    Vampire 79 1808493980.853124471.70218085031305259902520

    cvc5 0 529256016.159265960.496529529010137772672
    2022-cvc5n 0 528254941.423261688.784528528010237772686
    Vampire 0 503116726.95529420.585503503012737772520
    iProver Fixedn 0 221245570.08162357.353221221040937773170
    Yices2 0 308.6968.7883030045439232514
    SMTInterpol 0 13277.472231.2331313061737773919
    UltimateEliminator+MathSAT 0 00.00.000048439230
    iProver 0 00.00.000063037773375

    cvc5 0 120642063.89644263.0612060120629129102672
    2022-cvc5n 0 119340629.30741176.14411930119330429102686
    Yices2 0 31311413.73111410.462313031358235122514
    SMTInterpol 0 31141258.69129310.1373110311118629103919
    UltimateEliminator+MathSAT 0 00.00.000089535120
    iProver 9 982275241.88270205.432982098251529103375
    iProver Fixedn 9 981267760.48468411.859981098151629103170
    Vampire 79 1305377253.89895051.11713050130519229102520

    Vampire 0 123710508.5682797.0461237355882317003170
    cvc5 0 1014988.946983.41101416998339303393
    2022-cvc5n 0 993921.333917.71899319974341403414
    iProver Fixedn 0 71210696.7952945.32871234678369503695
    iProver 0 6719901.612761.8736710671373603722
    Yices2 0 290534.986530.37829030260256715502567
    SMTInterpol 0 2022069.977937.49720211191420504159
    UltimateEliminator+MathSAT 0 00.00.00002857155090

diff --git a/archive/2023/results/equality-unsat-core.html b/archive/2023/results/equality-unsat-core.html
index 975b2893..ba5b7e65 100644
--- a/archive/2023/results/equality-unsat-core.html
+++ b/archive/2023/results/equality-unsat-core.html


    Equality (Unsat Core Track)

    Competition results for the Equality division in the Unsat Core Track.

    Sequential Performance: cvc5
    Parallel Performance: cvc5

    cvc5 0 824799
    2020-CVC4-ucn 0 820740
    2022-Vampiren 0 817671
    SMTInterpol 0 496088
    Yices2 0 0
    UltimateEliminator+MathSAT 0 0
    Vampire 121 729488

    2022-Vampiren 0 840597276597.08369664.046382
    cvc5 0 82479912178.71512131.701548
    2020-CVC4-ucn 0 82074015354.40515310.79571
    SMTInterpol 0 499589104623.01175506.8891498
    Yices2 0 00.00.00
    UltimateEliminator+MathSAT 0 00.00.00
    Vampire 121 750231318954.82480494.067181

diff --git a/archive/2023/results/fp-proof-exhibition.html b/archive/2023/results/fp-proof-exhibition.html
index 17843fc6..6d7d6cae 100644
--- a/archive/2023/results/fp-proof-exhibition.html
+++ b/archive/2023/results/fp-proof-exhibition.html


    FP (Proof Exhibition Track)

    Competition results for the FP logic in the Proof Exhibition Track.

    cvc5-lfsc 0 900
    cvc5 0 393

    cvc5-lfsc 0 90033694.86133558.464239238
    cvc5 0 3931690.0461678.418746744

diff --git a/archive/2023/results/fp-single-query.html b/archive/2023/results/fp-single-query.html
index bf2052da..f645f913 100644
--- a/archive/2023/results/fp-single-query.html
+++ b/archive/2023/results/fp-single-query.html


    FP (Single Query Track)

    Competition results for the FP logic in the Single Query Track.

    Sequential Performance: Bitwuzla
    Parallel Performance: Bitwuzla
    SAT Performance (parallel): Bitwuzla
    UNSAT Performance (parallel): Bitwuzla
    24s Performance (parallel): Bitwuzla

    2022-Bitwuzlan 0 1254
    Bitwuzla 0 1235
    Bitwuzla Fixedn 0 1235
    cvc5 0 1168
    UltimateEliminator+MathSAT 0 175

    2022-Bitwuzlan 0 125429722.40529696.886125411911358080
    Bitwuzla Fixedn 0 123529895.7529845.092123512211139999
    Bitwuzla 0 123529894.03429848.482123512111149999
    cvc5 0 116844192.68546098.5421168981070166143
    UltimateEliminator+MathSAT 0 175846.697612.4831752173115925

    Bitwuzla Fixedn 0 12216963.36216915.62212212200121299
    Bitwuzla 0 12115742.45515743.33812112101121299
    2022-Bitwuzlan 0 11915684.67915663.59711911903121280
    cvc5 0 9818646.65518725.38298980241212143
    UltimateEliminator+MathSAT 0 212.688.928220120121225

    2022-Bitwuzlan 0 113514037.72614033.2891135011351318680
    Bitwuzla 0 111414151.57914105.1441114011143418699
    Bitwuzla Fixedn 0 111312932.38812929.4691113011133518699
    cvc5 0 107025546.0327373.1610700107078186143
    UltimateEliminator+MathSAT 0 173834.017603.555173017397518625

    2022-Bitwuzlan 0 1116976.31946.6981116401076218218
    Bitwuzla 0 10941053.0291047.631094341060240240
    Bitwuzla Fixedn 0 10941053.8811048.4591094341060240240
    cvc5 0 10291932.6981922.721102947982305305
    UltimateEliminator+MathSAT 0 174841.935508.5321742172116030

diff --git a/archive/2023/results/fparith-incremental.html b/archive/2023/results/fparith-incremental.html
index 99487b23..30cc9e1b 100644
--- a/archive/2023/results/fparith-incremental.html
+++ b/archive/2023/results/fparith-incremental.html


    FPArith (Incremental Track)

    Competition results for the FPArith division in the Incremental Track.

    Parallel Performance: Bitwuzla

    Bitwuzla 0 5255534.89535.6380002
    2022-Bitwuzlan 0 3429177.17178.0262606
    cvc5 0 319551.9552.97286007
    UltimateEliminator+MathSAT 0 2388240.0173.42366700

diff --git a/archive/2023/results/fparith-proof-exhibition.html b/archive/2023/results/fparith-proof-exhibition.html
index d881a97c..c3e34292 100644
--- a/archive/2023/results/fparith-proof-exhibition.html
+++ b/archive/2023/results/fparith-proof-exhibition.html


    FPArith (Proof Exhibition Track)

    Competition results for the FPArith division in the Proof Exhibition Track.

    cvc5-lfsc 0 907
    cvc5 0 395

    cvc5-lfsc 0 90734751.89934325.6392700259
    cvc5 0 3951691.7721680.1427820770

diff --git a/archive/2023/results/fparith-single-query.html b/archive/2023/results/fparith-single-query.html
index 7d1362df..be754c51 100644
--- a/archive/2023/results/fparith-single-query.html
+++ b/archive/2023/results/fparith-single-query.html


    FPArith (Single Query Track)

    Competition results for the FPArith division in the Single Query Track.

    Sequential Performance: Bitwuzla
    Parallel Performance: Bitwuzla
    SAT Performance (parallel): Bitwuzla
    UNSAT Performance (parallel): Bitwuzla
    24s Performance (parallel): Bitwuzla

    Bitwuzla 0 1722
    Bitwuzla Fixedn 0 1722
    2022-Bitwuzlan 0 1605
    cvc5 0 1499
    UltimateEliminator+MathSAT 0 291

    Bitwuzla 0 172230652.08530606.759172257111511270127
    Bitwuzla Fixedn 0 172230667.35830615.341172257211501270127
    2022-Bitwuzlan 0 160533035.41433010.42160543211732440240
    cvc5 0 149954723.01256850.519149939811013500294
    UltimateEliminator+MathSAT 0 2911946.5131474.106291961951558025

    Bitwuzla Fixedn 0 57217412.30317363.157572572001277127
    Bitwuzla 0 57116182.4116183.432571571011277127
    2022-Bitwuzlan 0 43218630.52218609.89943243201401277240
    cvc5 0 39827382.99627686.33939839801741277294
    UltimateEliminator+MathSAT 0 96970.494776.55696960476127725

    2022-Bitwuzlan 0 117314404.89314400.52111730117313663240
    Bitwuzla 0 115114469.67514423.32711510115135663127
    Bitwuzla Fixedn 0 115013255.05513252.18311500115036663127
    cvc5 0 110127340.01629164.1811010110185663294
    UltimateEliminator+MathSAT 0 195976.019697.55195019599166325

    Bitwuzla 0 15771191.9311186.629157748310942720272
    Bitwuzla Fixedn 0 15771194.6031187.74157748310942720272
    2022-Bitwuzlan 0 14591148.0741118.537145934711123900386
    cvc5 0 13222139.5872123.532132231610065270494
    UltimateEliminator+MathSAT 0 2841692.0621134.426284901941565038

diff --git a/archive/2023/results/fparith-unsat-core.html b/archive/2023/results/fparith-unsat-core.html
index e94f7258..a98b0f93 100644
--- a/archive/2023/results/fparith-unsat-core.html
+++ b/archive/2023/results/fparith-unsat-core.html


    FPArith (Unsat Core Track)

    Competition results for the FPArith division in the Unsat Core Track.

    Sequential Performance: Bitwuzla
    Parallel Performance: Bitwuzla

    2020-CVC4-ucn 0 50
    Bitwuzla 0 50
    Bitwuzla Fixedn 0 50
    cvc5 0 29
    UltimateEliminator+MathSAT 0 0

    2020-CVC4-ucn 0 5011.54811.5470
    Bitwuzla 0 5041.28541.3130
    Bitwuzla Fixedn 0 5042.05642.0610
    cvc5 0 2930.5230.5180
    UltimateEliminator+MathSAT 0 00.00.00

diff --git a/archive/2023/results/fplra-single-query.html b/archive/2023/results/fplra-single-query.html
index 90f428d7..d3e24ff1 100644
--- a/archive/2023/results/fplra-single-query.html
+++ b/archive/2023/results/fplra-single-query.html


    FPLRA (Single Query Track)

    Competition results for the FPLRA logic in the Single Query Track.

    Sequential Performance: Bitwuzla
    Parallel Performance: Bitwuzla
    SAT Performance (parallel): Bitwuzla
    UNSAT Performance (parallel): —
    24s Performance (parallel): Bitwuzla

    Bitwuzla 0 37
    Bitwuzla Fixedn 0 37
    2022-Bitwuzlan 0 27
    cvc5 0 23
    UltimateEliminator+MathSAT 0 14

    Bitwuzla 0 3731.20731.2113737044
    Bitwuzla Fixedn 0 3731.73331.7443737044
    2022-Bitwuzlan 0 27810.086810.146272701414
    cvc5 0 23140.884140.879232301817
    UltimateEliminator+MathSAT 0 1494.9970.96814140270

    Bitwuzla 0 3731.20731.21137370044
    Bitwuzla Fixedn 0 3731.73331.74437370044
    2022-Bitwuzlan 0 27810.086810.1462727010414
    cvc5 0 23140.884140.8792323014417
    UltimateEliminator+MathSAT 0 1494.9970.968141402340

    2022-Bitwuzlan 0 00.00.000004114
    cvc5 0 00.00.000004117
    Bitwuzla 0 00.00.00000414
    UltimateEliminator+MathSAT 0 00.00.00000410
    Bitwuzla Fixedn 0 00.00.00000414

    Bitwuzla 0 3731.20731.2113737044
    Bitwuzla Fixedn 0 3731.73331.7443737044
    2022-Bitwuzlan 0 2527.01427.02252501616
    cvc5 0 229.2729.262222201918
    UltimateEliminator+MathSAT 0 1494.9970.96814140270

diff --git a/archive/2023/results/largest-contribution-incremental.html b/archive/2023/results/largest-contribution-incremental.html
index 6af7daf6..d72ecb22 100644
--- a/archive/2023/results/largest-contribution-incremental.html
+++ b/archive/2023/results/largest-contribution-incremental.html


    Winners

    Parallel Performance: cvc5

    Parallel Performance
    cvc5 0.09119607
    cvc5 0.06845721
    Yices2 0.02804122
    cvc5 0.00140228
    Yices2 0.00102606
    Bitwuzla 0.00095845
    Bitwuzla 0.00076854
    SMTInterpol 0.00049581
    Bitwuzla 0.00027731
    Bitwuzla 0.00018095
    cvc5 0.00011741
    OpenSMT 0.00010723
    Yices2 7.64e-06
    cvc5 0.0
    cvc5 0.0

diff --git a/archive/2023/results/largest-contribution-model-validation.html b/archive/2023/results/largest-contribution-model-validation.html
index 3153f414..06ffd84d 100644
--- a/archive/2023/results/largest-contribution-model-validation.html
+++ b/archive/2023/results/largest-contribution-model-validation.html


    Winners

    Sequential Performance: Z3++
    Parallel Performance: Z3++

    Sequential Performance
    Bitwuzla 0.00810667
    Z3++ 0.00474549
    Z3++ 0.00388048
    Z3++ 0.00183232
    SMTInterpol 0.00132293
    OpenSMT 0.00026185
    STP 0.00026064
    OpenSMT 0.00014552
    Bitwuzla 8.662e-05
    Yices2 0.0

    Parallel Performance
    Bitwuzla 0.00810667
    Z3++ 0.00474549
    Z3++ 0.00388048
    Z3++ 0.00183232
    SMTInterpol 0.00132293
    OpenSMT 0.00026157
    STP 0.00026064
    OpenSMT 0.00014552
    Bitwuzla 8.662e-05
    Yices2 0.0

diff --git a/archive/2023/results/largest-contribution-single-query.html b/archive/2023/results/largest-contribution-single-query.html
index 5f3decc0..002550c9 100644
--- a/archive/2023/results/largest-contribution-single-query.html
+++ b/archive/2023/results/largest-contribution-single-query.html


    Winners

    Sequential Performance: cvc5
    Parallel Performance: cvc5
    SAT Performance (parallel): cvc5
    UNSAT Performance (parallel): cvc5
    24 seconds Performance (parallel): cvc5

    Sequential Performance
    cvc5 0.03146325
    cvc5 0.02855388
    cvc5 0.01566994
    cvc5 0.01416832
    Z3++ 0.00262965
    Bitwuzla 0.00200413
    YicesQS 0.00186045
    Bitwuzla 0.00118404
    Z3++ 0.00086703
    cvc5 0.00077048
    cvc5 0.00065615
    SMTInterpol 0.00030175
    Z3++ 0.0002899
    cvc5 0.00027515
    STP 0.00024832
    OpenSMT 9.377e-05
    Yices2 0.0

    Parallel Performance
    cvc5 0.04693491
    cvc5 0.03051732
    cvc5 0.02855388
    cvc5 0.01210123
    Z3++ 0.00262965
    Bitwuzla 0.00200413
    YicesQS 0.00186045
    Bitwuzla 0.00118404
    Z3++ 0.00086703
    cvc5 0.00077048
    cvc5 0.00065615
    SMTInterpol 0.00031199
    Z3++ 0.0002899
    cvc5 0.00027515
    STP 0.00024832
    OpenSMT 9.377e-05
    Yices2 0.0

    SAT Performance
    cvc5 0.041491
    cvc5 0.04007647
    cvc5 0.03147117
    cvc5 0.01259513
    Bitwuzla 0.00426264
    Z3++ 0.00294951
    YicesQS 0.00220737
    Z3++ 0.00103917
    cvc5 0.00092473
    Bitwuzla 0.00077161
    Bitwuzla 0.00053955
    Z3++ 0.00051054
    SMTInterpol 0.00043819
    Bitwuzla 0.00019215
    Yices2 0.0001431
    OpenSMT 9.577e-05
    Yices2 0.0

    UNSAT Performance
    cvc5 0.04001813
    cvc5 0.0287424
    cvc5 0.02323101
    cvc5 0.00830851
    Z3++ 0.00193396
    YicesQS 0.00165128
    Bitwuzla 0.00142509
    Bitwuzla 0.00090846
    cvc5 0.00083223
    Z3++ 0.00057129
    Bitwuzla 0.00038434
    STP 0.00036137
    cvc5 0.0003027
    cvc5-NRA-LS 0.00026015
    OpenSMT 0.00021747
    OpenSMT 9.124e-05
    Yices2 0.0

    24s Performance
    cvc5 0.0280728
    cvc5 0.0273453
    cvc5 0.01867736
    Vampire 0.01638628
    Z3++ 0.01200778
    Yices2 0.00961171
    YicesQS 0.00304568
    Bitwuzla 0.00252949
    STP 0.00144345
    Bitwuzla 0.00131927
    Bitwuzla 0.00119926
    Bitwuzla 0.00076897
    Yices2 0.00067047
    Z3++ 0.00063153
    Yices2 0.00034444
    Yices2 0.00031468
    cvc5 0.00027468

diff --git a/archive/2023/results/largest-contribution-unsat-core.html b/archive/2023/results/largest-contribution-unsat-core.html
index 6bdd6e6e..8a4ce8c8 100644
--- a/archive/2023/results/largest-contribution-unsat-core.html
+++ b/archive/2023/results/largest-contribution-unsat-core.html


    Winners

    Sequential Performance: cvc5
    Parallel Performance: cvc5

    Sequential Performance
    cvc5 0.15778649
    cvc5 0.05088905
    cvc5 0.01204205
    Yices2 0.00730203
    cvc5 0.00586297
    cvc5 0.00571191
    Yices2 0.00529165
    Yices2 0.00120988
    Yices2 0.00112697
    Bitwuzla 0.00020744

    Parallel Performance
    cvc5 0.15744236
    cvc5 0.05037275
    cvc5 0.01204205
    Yices2 0.00722597
    cvc5 0.00586297
    cvc5 0.00571191
    Yices2 0.00529165
    Yices2 0.00120988
    Yices2 0.00112697
    Bitwuzla 0.00020744

diff --git a/archive/2023/results/lia-incremental.html b/archive/2023/results/lia-incremental.html
index ad1ffcc1..a7c3b1cd 100644
--- a/archive/2023/results/lia-incremental.html
+++ b/archive/2023/results/lia-incremental.html


    LIA (Incremental Track)

    Competition results for the LIA logic in the Incremental Track.

    Parallel Performance: cvc5

    2021-cvc5-incn 0 2539322.3421.3800
    cvc5 0 2539328.0627.1500
    UltimateEliminator+MathSAT 0 25393280.22162.5100
    SMTInterpol 0 2539189.5730.620

diff --git a/archive/2023/results/lia-proof-exhibition.html b/archive/2023/results/lia-proof-exhibition.html
index f7855fd3..c472fc12 100644
--- a/archive/2023/results/lia-proof-exhibition.html
+++ b/archive/2023/results/lia-proof-exhibition.html


    LIA (Proof Exhibition Track)

    Competition results for the LIA logic in the Proof Exhibition Track.

    cvc5-lfsc 0 149
    cvc5 0 143
    SMTInterpol 0 101

    cvc5-lfsc 0 14964.60564.44811
    cvc5 0 143728.539719.55576
    SMTInterpol 0 101224.768122.8544910

diff --git a/archive/2023/results/lia-single-query.html b/archive/2023/results/lia-single-query.html
index 14ed43af..dcfab1a9 100644
--- a/archive/2023/results/lia-single-query.html
+++ b/archive/2023/results/lia-single-query.html


    LIA (Single Query Track)

    Competition results for the LIA logic in the Single Query Track.

    Sequential Performance: cvc5
    Parallel Performance: cvc5
    SAT Performance (parallel): cvc5
    UNSAT Performance (parallel): cvc5
    24s Performance (parallel): cvc5

    cvc5 0 300
    2021-z3n 0 292
    UltimateEliminator+MathSAT 0 243
    YicesQS 0 179
    iProver Fixedn 0 116
    SMTInterpol 0 98
    iProver 21 128
    Vampire 127 162

    cvc5 0 30078.70178.66330013716300
    2021-z3n 0 2929.5399.56129213715588
    UltimateEliminator+MathSAT 0 24513095.91711031.176245851605555
    YicesQS 0 1791034.751034.9241799782121121
    iProver Fixedn 0 1161217.929360.4011160116184173
    SMTInterpol 0 98135.08891.2519889020298
    iProver 21 1295767.7561508.3761290129171139
    Vampire 127 162863.351233.043162016213811

    2021-z3n 0 1375.4275.436137137001638
    cvc5 0 13760.14160.171137137001630
    YicesQS 0 971028.9421029.0759797040163121
    UltimateEliminator+MathSAT 0 8510638.6839230.434858505216355
    SMTInterpol 0 84.3753.06388012916398
    Vampire 0 00.00.000013716311
    iProver 0 00.00.0000137163139
    iProver Fixedn 0 00.00.0000137163173

    cvc5 0 16318.5618.492163016301370
    UltimateEliminator+MathSAT 0 1602457.2341800.7421600160313755
    2021-z3n 0 1554.1124.124155015581378
    iProver Fixedn 0 1161217.929360.401116011647137173
    SMTInterpol 0 90130.71388.189900907313798
    YicesQS 0 825.8085.8498208281137121
    iProver 21 1295767.7561508.376129012934137139
    Vampire 127 162863.351233.0431620162113711

    cvc5 0 30078.70178.66330013716300
    2021-z3n 0 2929.5399.56129213715588
    UltimateEliminator+MathSAT 0 2122310.8111120.735212611518888
    YicesQS 0 17055.38155.5381708882130130
    iProver Fixedn 0 114855.088266.9911140114186175
    SMTInterpol 0 98135.08891.25198890202132
    iProver 16 1261190.466347.6031260126174147
    Vampire 126 161175.94159.722161016113913

diff --git a/archive/2023/results/lia-unsat-core.html b/archive/2023/results/lia-unsat-core.html
index cec29890..2a2f62aa 100644
--- a/archive/2023/results/lia-unsat-core.html
+++ b/archive/2023/results/lia-unsat-core.html


    LIA (Unsat Core Track)

    Competition results for the LIA logic in the Unsat Core Track.

    Sequential Performance: cvc5
    Parallel Performance: cvc5

    cvc5 0 7
    2022-cvc5n 0 7
    Vampire 0 0
    SMTInterpol 0 0
    UltimateEliminator+MathSAT 1 6

    cvc5 0 712.14112.0371
    2022-cvc5n 0 719.63519.6041
    Vampire 0 057.64328.5091
    SMTInterpol 0 0114.96478.28318
    UltimateEliminator+MathSAT 1 62838.5191910.6923

diff --git a/archive/2023/results/lra-cloud.html b/archive/2023/results/lra-cloud.html
index d00f957a..2a1bba6d 100644
--- a/archive/2023/results/lra-cloud.html
+++ b/archive/2023/results/lra-cloud.html

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    LRA (Cloud Track)

    Competition results for the LRA - + logic - + in the Cloud Track.

    @@ -117,7 +117,7 @@

    LRA (Cloud Track)

    - + Vampire 0 293.26220280 @@ -126,7 +126,7 @@

    LRA (Cloud Track)

    - + cvc5 0 00.0000100 @@ -146,7 +146,7 @@

    LRA (Cloud Track)

    - + Vampire 0 00.00000100 @@ -155,7 +155,7 @@

    LRA (Cloud Track)

    - + cvc5 0 00.00000100 @@ -175,7 +175,7 @@

    LRA (Cloud Track)

    - + Vampire 0 293.262202350 @@ -184,7 +184,7 @@

    LRA (Cloud Track)

    - + cvc5 0 00.0000550 @@ -204,7 +204,7 @@

    LRA (Cloud Track)

    - + Vampire 0 00.00001010 @@ -213,7 +213,7 @@

    LRA (Cloud Track)

    - + cvc5 0 00.00001010 @@ -237,7 +237,6 @@

    LRA (Cloud Track)

diff --git a/archive/2023/results/lra-incremental.html b/archive/2023/results/lra-incremental.html
index da7b58cf..884de463 100644
--- a/archive/2023/results/lra-incremental.html
+++ b/archive/2023/results/lra-incremental.html
[Hunk bodies garbled in extraction. Recoverable context: "LRA (Incremental Track)" results page; winner cvc5 (parallel); result rows for 2021-cvc5-incn, cvc5, UltimateEliminator+MathSAT, SMTInterpol.]

diff --git a/archive/2023/results/lra-parallel.html b/archive/2023/results/lra-parallel.html
index 63c075ee..38b2226b 100644
--- a/archive/2023/results/lra-parallel.html
+++ b/archive/2023/results/lra-parallel.html
[Hunk bodies garbled in extraction. Recoverable context: "LRA (Parallel Track)" results page; result rows for Vampire and iProver.]

diff --git a/archive/2023/results/lra-proof-exhibition.html b/archive/2023/results/lra-proof-exhibition.html
index 322ae43a..74f62116 100644
--- a/archive/2023/results/lra-proof-exhibition.html
+++ b/archive/2023/results/lra-proof-exhibition.html
[Hunk bodies garbled in extraction. Recoverable context: "LRA (Proof Exhibition Track)" results page; result rows for cvc5-lfsc, cvc5, SMTInterpol.]

diff --git a/archive/2023/results/lra-single-query.html b/archive/2023/results/lra-single-query.html
index d1f49859..e6f7ade3 100644
--- a/archive/2023/results/lra-single-query.html
+++ b/archive/2023/results/lra-single-query.html
[Hunk bodies garbled in extraction. Recoverable context: "LRA (Single Query Track)" results page; winner YicesQS in all five categories (sequential, parallel, SAT, UNSAT, 24s); result rows for YicesQS, 2021-z3n, UltimateEliminator+MathSAT, cvc5, Vampire, iProver Fixedn, iProver, SMTInterpol.]

diff --git a/archive/2023/results/nia-proof-exhibition.html b/archive/2023/results/nia-proof-exhibition.html
index de472333..3871ad67 100644
--- a/archive/2023/results/nia-proof-exhibition.html
+++ b/archive/2023/results/nia-proof-exhibition.html
[Hunk bodies garbled in extraction. Recoverable context: "NIA (Proof Exhibition Track)" results page; result rows for cvc5-lfsc and cvc5.]

diff --git a/archive/2023/results/nia-single-query.html b/archive/2023/results/nia-single-query.html
index 3a0a9649..e5c451f0 100644
--- a/archive/2023/results/nia-single-query.html
+++ b/archive/2023/results/nia-single-query.html
[Hunk bodies garbled in extraction. Recoverable context: "NIA (Single Query Track)" results page; winner cvc5 in all five categories; result rows for cvc5, UltimateEliminator+MathSAT, YicesQS, 2021-z3n, iProver Fixedn, iProver, Vampire.]

diff --git a/archive/2023/results/nia-unsat-core.html b/archive/2023/results/nia-unsat-core.html
index 8f847989..53b15243 100644
--- a/archive/2023/results/nia-unsat-core.html
+++ b/archive/2023/results/nia-unsat-core.html
[Hunk bodies garbled in extraction. Recoverable context: "NIA (Unsat Core Track)" results page; winner cvc5 (sequential and parallel); result rows for cvc5, 2022-cvc5n, Vampire, UltimateEliminator+MathSAT.]

diff --git a/archive/2023/results/nra-cloud.html b/archive/2023/results/nra-cloud.html
index 4c468929..b82fea83 100644
--- a/archive/2023/results/nra-cloud.html
+++ b/archive/2023/results/nra-cloud.html
[Hunk bodies garbled in extraction. Recoverable context: "NRA (Cloud Track)" results page; result rows for Vampire and cvc5.]

diff --git a/archive/2023/results/nra-parallel.html b/archive/2023/results/nra-parallel.html
index 32ebc243..c3261908 100644
--- a/archive/2023/results/nra-parallel.html
+++ b/archive/2023/results/nra-parallel.html
[Hunk bodies garbled in extraction. Recoverable context: "NRA (Parallel Track)" results page; result rows for Vampire and iProver.]

diff --git a/archive/2023/results/nra-proof-exhibition.html b/archive/2023/results/nra-proof-exhibition.html
index 24e93cb5..1e6193ea 100644
--- a/archive/2023/results/nra-proof-exhibition.html
+++ b/archive/2023/results/nra-proof-exhibition.html
[Hunk bodies garbled in extraction. Recoverable context: "NRA (Proof Exhibition Track)" results page; result rows for cvc5-lfsc and cvc5.]

diff --git a/archive/2023/results/nra-single-query.html b/archive/2023/results/nra-single-query.html
index 1bc98a0a..87c2376a 100644
--- a/archive/2023/results/nra-single-query.html
+++ b/archive/2023/results/nra-single-query.html
[Hunk bodies garbled in extraction. Recoverable context: "NRA (Single Query Track)" results page; winner YicesQS in all five categories; result rows for YicesQS, 2021-z3n, cvc5, Vampire, iProver, iProver Fixedn, UltimateEliminator+MathSAT.]

diff --git a/archive/2023/results/qf-abv-incremental.html b/archive/2023/results/qf-abv-incremental.html
index d51432d8..50804f55 100644
--- a/archive/2023/results/qf-abv-incremental.html
+++ b/archive/2023/results/qf-abv-incremental.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ABV (Incremental Track)" results page; winner Bitwuzla (parallel); result rows for Bitwuzla, Yices2 Fixedn, Yices2, 2022-Yices2n, cvc5.]

diff --git a/archive/2023/results/qf-abv-model-validation.html b/archive/2023/results/qf-abv-model-validation.html
index 80ff82ad..53b440d5 100644
--- a/archive/2023/results/qf-abv-model-validation.html
+++ b/archive/2023/results/qf-abv-model-validation.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ABV (Model Validation Track)" results page; winner Bitwuzla (sequential and parallel); result rows for Bitwuzla Fixedn, Bitwuzla, cvc5, Yices2.]

diff --git a/archive/2023/results/qf-abv-proof-exhibition.html b/archive/2023/results/qf-abv-proof-exhibition.html
index 2131726d..35e73023 100644
--- a/archive/2023/results/qf-abv-proof-exhibition.html
+++ b/archive/2023/results/qf-abv-proof-exhibition.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ABV (Proof Exhibition Track)" results page; result rows for cvc5-lfsc and cvc5.]

diff --git a/archive/2023/results/qf-abv-single-query.html b/archive/2023/results/qf-abv-single-query.html
index fe940e16..3ebdef0c 100644
--- a/archive/2023/results/qf-abv-single-query.html
+++ b/archive/2023/results/qf-abv-single-query.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ABV (Single Query Track)" results page; winner Bitwuzla in all five categories; result rows for Bitwuzla, Bitwuzla Fixedn, Yices2, 2022-Bitwuzlan, cvc5, UltimateIntBlastingWrapper+SMTInterpol, Z3-Owl Fixedn, Z3-Owl.]

diff --git a/archive/2023/results/qf-abv-unsat-core.html b/archive/2023/results/qf-abv-unsat-core.html
index f476b5ed..c92aabf7 100644
--- a/archive/2023/results/qf-abv-unsat-core.html
+++ b/archive/2023/results/qf-abv-unsat-core.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ABV (Unsat Core Track)" results page; winner Yices2 (sequential and parallel); result rows for 2022-Bitwuzlan, Bitwuzla Fixedn, Yices2, cvc5, and Bitwuzla (3*).]

diff --git a/archive/2023/results/qf-abvfp-incremental.html b/archive/2023/results/qf-abvfp-incremental.html
index 354a4df8..a92b606e 100644
--- a/archive/2023/results/qf-abvfp-incremental.html
+++ b/archive/2023/results/qf-abvfp-incremental.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ABVFP (Incremental Track)" results page; winner Bitwuzla (parallel); result rows for Bitwuzla, 2022-Bitwuzlan, cvc5.]

diff --git a/archive/2023/results/qf-abvfp-model-validation.html b/archive/2023/results/qf-abvfp-model-validation.html
index c22c93ef..d6891e24 100644
--- a/archive/2023/results/qf-abvfp-model-validation.html
+++ b/archive/2023/results/qf-abvfp-model-validation.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ABVFP (Model Validation Track)" results page; winner Bitwuzla (sequential and parallel); result rows for Bitwuzla Fixedn, Bitwuzla, cvc5.]

diff --git a/archive/2023/results/qf-abvfp-proof-exhibition.html b/archive/2023/results/qf-abvfp-proof-exhibition.html
index 47873240..a4244339 100644
--- a/archive/2023/results/qf-abvfp-proof-exhibition.html
+++ b/archive/2023/results/qf-abvfp-proof-exhibition.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ABVFP (Proof Exhibition Track)" results page; result rows for cvc5-lfsc and cvc5.]

diff --git a/archive/2023/results/qf-abvfp-single-query.html b/archive/2023/results/qf-abvfp-single-query.html
index 7c622c0b..0268798b 100644
--- a/archive/2023/results/qf-abvfp-single-query.html
+++ b/archive/2023/results/qf-abvfp-single-query.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ABVFP (Single Query Track)" results page; winner Bitwuzla in all five categories; result rows for 2022-Bitwuzlan, Bitwuzla Fixedn, Bitwuzla, cvc5, COLIBRI.]

diff --git a/archive/2023/results/qf-abvfp-unsat-core.html b/archive/2023/results/qf-abvfp-unsat-core.html
index 5bede405..231a078f 100644
--- a/archive/2023/results/qf-abvfp-unsat-core.html
+++ b/archive/2023/results/qf-abvfp-unsat-core.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ABVFP (Unsat Core Track)" results page; winner Bitwuzla (sequential and parallel); result rows for Bitwuzla Fixedn, Bitwuzla, 2022-z3-4.8.17n, cvc5.]

diff --git a/archive/2023/results/qf-abvfplra-incremental.html b/archive/2023/results/qf-abvfplra-incremental.html
index e650f053..ed0df40d 100644
--- a/archive/2023/results/qf-abvfplra-incremental.html
+++ b/archive/2023/results/qf-abvfplra-incremental.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ABVFPLRA (Incremental Track)" results page; winner Bitwuzla (parallel); result rows for Bitwuzla, cvc5, 2022-Bitwuzlan.]

diff --git a/archive/2023/results/qf-abvfplra-model-validation.html b/archive/2023/results/qf-abvfplra-model-validation.html
index c38ae3d0..1482ee98 100644
--- a/archive/2023/results/qf-abvfplra-model-validation.html
+++ b/archive/2023/results/qf-abvfplra-model-validation.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ABVFPLRA (Model Validation Track)" results page; winner Bitwuzla (sequential and parallel); result rows for Bitwuzla, Bitwuzla Fixedn, cvc5.]

diff --git a/archive/2023/results/qf-abvfplra-proof-exhibition.html b/archive/2023/results/qf-abvfplra-proof-exhibition.html
index df55b12e..885d7834 100644
--- a/archive/2023/results/qf-abvfplra-proof-exhibition.html
+++ b/archive/2023/results/qf-abvfplra-proof-exhibition.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ABVFPLRA (Proof Exhibition Track)" results page; result rows for cvc5-lfsc and cvc5.]

diff --git a/archive/2023/results/qf-abvfplra-single-query.html b/archive/2023/results/qf-abvfplra-single-query.html
index c4c2e160..70d8d0da 100644
--- a/archive/2023/results/qf-abvfplra-single-query.html
+++ b/archive/2023/results/qf-abvfplra-single-query.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ABVFPLRA (Single Query Track)" results page; winner COLIBRI (sequential, parallel, UNSAT, 24s) with Bitwuzla taking SAT performance; result rows for COLIBRI, Bitwuzla, Bitwuzla Fixedn, cvc5, 2022-Bitwuzlan.]

diff --git a/archive/2023/results/qf-abvfplra-unsat-core.html b/archive/2023/results/qf-abvfplra-unsat-core.html
index 73255391..802a816b 100644
--- a/archive/2023/results/qf-abvfplra-unsat-core.html
+++ b/archive/2023/results/qf-abvfplra-unsat-core.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ABVFPLRA (Unsat Core Track)" results page; winner Bitwuzla (sequential and parallel); result rows for Bitwuzla, Bitwuzla Fixedn, cvc5, 2022-z3-4.8.17n.]

diff --git a/archive/2023/results/qf-adt-bitvec-model-validation.html b/archive/2023/results/qf-adt-bitvec-model-validation.html
index 6bf475c9..2b4a4e49 100644
--- a/archive/2023/results/qf-adt-bitvec-model-validation.html
+++ b/archive/2023/results/qf-adt-bitvec-model-validation.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ADT+BitVec (Model Validation Track)" division results page; result rows for Bitwuzla Fixedn, Bitwuzla, cvc5, Yices2.]

diff --git a/archive/2023/results/qf-adt-linarith-model-validation.html b/archive/2023/results/qf-adt-linarith-model-validation.html
index 53e78429..ec50ead1 100644
--- a/archive/2023/results/qf-adt-linarith-model-validation.html
+++ b/archive/2023/results/qf-adt-linarith-model-validation.html
[Hunk bodies garbled in extraction. Recoverable context: "QF_ADT+LinArith (Model Validation Track)" division results page; result rows for SMTInterpol, cvc5, Yices2.]

    - + - diff --git a/archive/2023/results/qf-alia-incremental.html b/archive/2023/results/qf-alia-incremental.html index a85b23e5..4d08dfb3 100644 --- a/archive/2023/results/qf-alia-incremental.html +++ b/archive/2023/results/qf-alia-incremental.html @@ -35,7 +35,7 @@

[Results page for the QF_ALIA logic in the Incremental Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winner (parallel): cvc5. Entrants in ranked order: 2020-z3n, cvc5, SMTInterpol, Yices2, Yices2 Fixedn, OpenSMT.]

diff --git a/archive/2023/results/qf-alia-model-validation.html b/archive/2023/results/qf-alia-model-validation.html
index e0c10379..ab680546 100644
--- a/archive/2023/results/qf-alia-model-validation.html
+++ b/archive/2023/results/qf-alia-model-validation.html
@@ -35,7 +35,7 @@

[Results page for the QF_ALIA logic in the Model Validation Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winners: SMTInterpol (sequential and parallel). Entrants in ranked order: SMTInterpol, cvc5, Yices2.]

diff --git a/archive/2023/results/qf-alia-proof-exhibition.html b/archive/2023/results/qf-alia-proof-exhibition.html
index 4ea743d4..f12598eb 100644
--- a/archive/2023/results/qf-alia-proof-exhibition.html
+++ b/archive/2023/results/qf-alia-proof-exhibition.html
@@ -35,7 +35,7 @@

[Results page for the QF_ALIA logic in the Proof Exhibition Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Entrants in ranked order: SMTInterpol, cvc5-lfsc, cvc5.]

diff --git a/archive/2023/results/qf-alia-single-query.html b/archive/2023/results/qf-alia-single-query.html
index 29147194..ecaf7458 100644
--- a/archive/2023/results/qf-alia-single-query.html
+++ b/archive/2023/results/qf-alia-single-query.html
@@ -35,7 +35,7 @@

[Results page for the QF_ALIA logic in the Single Query Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winners: SMTInterpol (sequential, parallel, SAT), Yices2 (UNSAT, 24s). Entrants in ranked order: SMTInterpol, OpenSMT, Yices2, 2022-z3-4.8.17n, cvc5.]

diff --git a/archive/2023/results/qf-alia-unsat-core.html b/archive/2023/results/qf-alia-unsat-core.html
index e4b9ae83..a19a34de 100644
--- a/archive/2023/results/qf-alia-unsat-core.html
+++ b/archive/2023/results/qf-alia-unsat-core.html
@@ -35,7 +35,7 @@

[Results page for the QF_ALIA logic in the Unsat Core Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winners: SMTInterpol (sequential and parallel). Entrants in ranked order: SMTInterpol, 2022-Yices2n, Yices2, cvc5.]

diff --git a/archive/2023/results/qf-ania-incremental.html b/archive/2023/results/qf-ania-incremental.html
index 32349693..ee29e72b 100644
--- a/archive/2023/results/qf-ania-incremental.html
+++ b/archive/2023/results/qf-ania-incremental.html
@@ -35,7 +35,7 @@

[Results page for the QF_ANIA logic in the Incremental Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winner (parallel): SMTInterpol. Entrants in ranked order: 2022-z3-4.8.17n, SMTInterpol, cvc5, Yices2, Yices2 Fixedn.]

diff --git a/archive/2023/results/qf-ania-model-validation.html b/archive/2023/results/qf-ania-model-validation.html
index 0c1133ea..cf4f7ec5 100644
--- a/archive/2023/results/qf-ania-model-validation.html
+++ b/archive/2023/results/qf-ania-model-validation.html
@@ -35,7 +35,7 @@

[Results page for the QF_ANIA logic in the Model Validation Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winners: cvc5 (sequential and parallel). Entrants in ranked order: cvc5, Yices2.]

diff --git a/archive/2023/results/qf-ania-proof-exhibition.html b/archive/2023/results/qf-ania-proof-exhibition.html
index f298dff4..435e6fb6 100644
--- a/archive/2023/results/qf-ania-proof-exhibition.html
+++ b/archive/2023/results/qf-ania-proof-exhibition.html
@@ -35,7 +35,7 @@

[Results page for the QF_ANIA logic in the Proof Exhibition Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Entrants in ranked order: SMTInterpol, cvc5-lfsc, cvc5.]

diff --git a/archive/2023/results/qf-ania-single-query.html b/archive/2023/results/qf-ania-single-query.html
index 63e69e55..d21fe93d 100644
--- a/archive/2023/results/qf-ania-single-query.html
+++ b/archive/2023/results/qf-ania-single-query.html
@@ -35,7 +35,7 @@

[Results page for the QF_ANIA logic in the Single Query Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winner in all five categories (sequential, parallel, SAT, UNSAT, 24s): SMTInterpol. Entrants in ranked order: SMTInterpol, Yices2, 2020-CVC4n, cvc5.]

diff --git a/archive/2023/results/qf-ania-unsat-core.html b/archive/2023/results/qf-ania-unsat-core.html
index 8c45f8aa..22032e1c 100644
--- a/archive/2023/results/qf-ania-unsat-core.html
+++ b/archive/2023/results/qf-ania-unsat-core.html
@@ -35,7 +35,7 @@

[Results page for the QF_ANIA logic in the Unsat Core Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winners: SMTInterpol (sequential and parallel). Entrants in ranked order: 2021-cvc5-ucn, SMTInterpol, cvc5.]

diff --git a/archive/2023/results/qf-array-bitvec-linarith-model-validation.html b/archive/2023/results/qf-array-bitvec-linarith-model-validation.html
index ae78ad25..5e897f6e 100644
--- a/archive/2023/results/qf-array-bitvec-linarith-model-validation.html
+++ b/archive/2023/results/qf-array-bitvec-linarith-model-validation.html
@@ -35,7 +35,7 @@

[Results page for the QF_Array+Bitvec+LinArith division in the Model Validation Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Entrants in ranked order: cvc5, Bitwuzla Fixedn, Bitwuzla, SMTInterpol, Yices2.]

diff --git a/archive/2023/results/qf-aufbv-incremental.html b/archive/2023/results/qf-aufbv-incremental.html
index 187be4b5..801aa40e 100644
--- a/archive/2023/results/qf-aufbv-incremental.html
+++ b/archive/2023/results/qf-aufbv-incremental.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFBV logic in the Incremental Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winner (parallel): Bitwuzla. Entrants in ranked order: Bitwuzla, Yices2 Fixedn, 2022-Yices2n, Yices2, cvc5.]

diff --git a/archive/2023/results/qf-aufbv-model-validation.html b/archive/2023/results/qf-aufbv-model-validation.html
index ee3a0261..04d7e833 100644
--- a/archive/2023/results/qf-aufbv-model-validation.html
+++ b/archive/2023/results/qf-aufbv-model-validation.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFBV logic in the Model Validation Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winners: Bitwuzla (sequential and parallel). Entrants in ranked order: Bitwuzla Fixedn, Bitwuzla, cvc5, Yices2.]

diff --git a/archive/2023/results/qf-aufbv-proof-exhibition.html b/archive/2023/results/qf-aufbv-proof-exhibition.html
index ed727d01..dcb2a86f 100644
--- a/archive/2023/results/qf-aufbv-proof-exhibition.html
+++ b/archive/2023/results/qf-aufbv-proof-exhibition.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFBV logic in the Proof Exhibition Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Entrants in ranked order: cvc5-lfsc, cvc5.]

diff --git a/archive/2023/results/qf-aufbv-single-query.html b/archive/2023/results/qf-aufbv-single-query.html
index 5adeabbc..8ba53c69 100644
--- a/archive/2023/results/qf-aufbv-single-query.html
+++ b/archive/2023/results/qf-aufbv-single-query.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFBV logic in the Single Query Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winners: Bitwuzla (sequential, parallel, SAT, UNSAT), Yices2 (24s). Entrants in ranked order: Bitwuzla Fixedn, Bitwuzla, Yices2, Z3-Owl Fixedn, 2022-Bitwuzlan, cvc5, UltimateIntBlastingWrapper+SMTInterpol, Z3-Owl.]

diff --git a/archive/2023/results/qf-aufbv-unsat-core.html b/archive/2023/results/qf-aufbv-unsat-core.html
index 082fb4d5..f856fd9e 100644
--- a/archive/2023/results/qf-aufbv-unsat-core.html
+++ b/archive/2023/results/qf-aufbv-unsat-core.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFBV logic in the Unsat Core Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winners: Yices2 (sequential and parallel). Entrants in ranked order: Yices2, Bitwuzla, Bitwuzla Fixedn, 2022-Bitwuzlan, cvc5.]

diff --git a/archive/2023/results/qf-aufbvfp-model-validation.html b/archive/2023/results/qf-aufbvfp-model-validation.html
index a89ab38b..4976099a 100644
--- a/archive/2023/results/qf-aufbvfp-model-validation.html
+++ b/archive/2023/results/qf-aufbvfp-model-validation.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFBVFP logic in the Model Validation Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winners: Bitwuzla (sequential and parallel). Entrants in ranked order: Bitwuzla Fixedn, Bitwuzla, cvc5.]

diff --git a/archive/2023/results/qf-aufbvfp-single-query.html b/archive/2023/results/qf-aufbvfp-single-query.html
index 56e15059..37819e2c 100644
--- a/archive/2023/results/qf-aufbvfp-single-query.html
+++ b/archive/2023/results/qf-aufbvfp-single-query.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFBVFP logic in the Single Query Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winners: Bitwuzla (sequential, parallel, SAT, 24s), none declared for UNSAT. Entrants in ranked order: 2022-Bitwuzlan, Bitwuzla Fixedn, Bitwuzla, COLIBRI, cvc5.]

diff --git a/archive/2023/results/qf-aufbvlia-incremental.html b/archive/2023/results/qf-aufbvlia-incremental.html
index 94c28a40..c2f6d7d5 100644
--- a/archive/2023/results/qf-aufbvlia-incremental.html
+++ b/archive/2023/results/qf-aufbvlia-incremental.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFBVLIA logic in the Incremental Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winner (parallel): Yices2. Entrants in ranked order: Yices2, Yices2 Fixedn, cvc5.]

diff --git a/archive/2023/results/qf-aufbvnia-incremental.html b/archive/2023/results/qf-aufbvnia-incremental.html
index 59552302..477ad2e1 100644
--- a/archive/2023/results/qf-aufbvnia-incremental.html
+++ b/archive/2023/results/qf-aufbvnia-incremental.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFBVNIA logic in the Incremental Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winner (parallel): cvc5. Entrants in ranked order: Yices2 Fixedn, cvc5, Yices2.]

diff --git a/archive/2023/results/qf-auflia-incremental.html b/archive/2023/results/qf-auflia-incremental.html
index f16e0518..07304aae 100644
--- a/archive/2023/results/qf-auflia-incremental.html
+++ b/archive/2023/results/qf-auflia-incremental.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFLIA logic in the Incremental Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winner (parallel): SMTInterpol. Entrants in ranked order: 2020-z3n, SMTInterpol, cvc5, Yices2 Fixedn, Yices2, OpenSMT.]

diff --git a/archive/2023/results/qf-auflia-model-validation.html b/archive/2023/results/qf-auflia-model-validation.html
index f7cae380..1c6378f6 100644
--- a/archive/2023/results/qf-auflia-model-validation.html
+++ b/archive/2023/results/qf-auflia-model-validation.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFLIA logic in the Model Validation Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winners: cvc5 (sequential and parallel). Entrants in ranked order: cvc5, SMTInterpol, Yices2.]

diff --git a/archive/2023/results/qf-auflia-proof-exhibition.html b/archive/2023/results/qf-auflia-proof-exhibition.html
index b6d350c2..055690aa 100644
--- a/archive/2023/results/qf-auflia-proof-exhibition.html
+++ b/archive/2023/results/qf-auflia-proof-exhibition.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFLIA logic in the Proof Exhibition Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Entrants in ranked order: SMTInterpol, cvc5-lfsc, cvc5.]

diff --git a/archive/2023/results/qf-auflia-single-query.html b/archive/2023/results/qf-auflia-single-query.html
index dba42816..f468ea78 100644
--- a/archive/2023/results/qf-auflia-single-query.html
+++ b/archive/2023/results/qf-auflia-single-query.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFLIA logic in the Single Query Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winner in all five categories (sequential, parallel, SAT, UNSAT, 24s): Yices2. Entrants in ranked order: 2022-z3-4.8.17n, Yices2, cvc5, OpenSMT, SMTInterpol.]

diff --git a/archive/2023/results/qf-auflia-unsat-core.html b/archive/2023/results/qf-auflia-unsat-core.html
index 468f22d8..31bded8f 100644
--- a/archive/2023/results/qf-auflia-unsat-core.html
+++ b/archive/2023/results/qf-auflia-unsat-core.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFLIA logic in the Unsat Core Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winners: cvc5 (sequential and parallel). Entrants in ranked order: cvc5, 2022-Yices2n, Yices2, SMTInterpol.]

diff --git a/archive/2023/results/qf-aufnia-model-validation.html b/archive/2023/results/qf-aufnia-model-validation.html
index 37a69694..d05cbc93 100644
--- a/archive/2023/results/qf-aufnia-model-validation.html
+++ b/archive/2023/results/qf-aufnia-model-validation.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFNIA logic in the Model Validation Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winners: cvc5 (sequential and parallel). Entrants in ranked order: cvc5, Yices2.]

diff --git a/archive/2023/results/qf-aufnia-proof-exhibition.html b/archive/2023/results/qf-aufnia-proof-exhibition.html
index 391a9097..44f695bc 100644
--- a/archive/2023/results/qf-aufnia-proof-exhibition.html
+++ b/archive/2023/results/qf-aufnia-proof-exhibition.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFNIA logic in the Proof Exhibition Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Entrants in ranked order: SMTInterpol, cvc5-lfsc, cvc5.]

diff --git a/archive/2023/results/qf-aufnia-single-query.html b/archive/2023/results/qf-aufnia-single-query.html
index 2b87a8ab..0118f7b1 100644
--- a/archive/2023/results/qf-aufnia-single-query.html
+++ b/archive/2023/results/qf-aufnia-single-query.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFNIA logic in the Single Query Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winner in all five categories (sequential, parallel, SAT, UNSAT, 24s): SMTInterpol. Entrants in ranked order: 2020-CVC4n, SMTInterpol, cvc5, Yices2.]

diff --git a/archive/2023/results/qf-aufnia-unsat-core.html b/archive/2023/results/qf-aufnia-unsat-core.html
index fbcdbeac..b5ec0531 100644
--- a/archive/2023/results/qf-aufnia-unsat-core.html
+++ b/archive/2023/results/qf-aufnia-unsat-core.html
@@ -35,7 +35,7 @@

[Results page for the QF_AUFNIA logic in the Unsat Core Track; hunk text garbled in extraction (banner, navigation menu, result tables) and the changed lines carry no visible text. Winners: SMTInterpol (sequential and parallel). Entrants in ranked order: SMTInterpol, 2021-cvc5-ucn, cvc5.]

diff --git a/archive/2023/results/qf-ax-model-validation.html b/archive/2023/results/qf-ax-model-validation.html
index 43a84f91..c81364e3 100644
--- a/archive/2023/results/qf-ax-model-validation.html
+++ b/archive/2023/results/qf-ax-model-validation.html
@@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    QF_AX (Model Validation Track)

    Competition results for the QF_AX - + logic - + in the Model Validation Track.

    @@ -104,8 +104,8 @@

    QF_AX (Model Validation Track)

    Sequential PerformanceParallel Performance cvc5cvc5 - - + + @@ -126,7 +126,7 @@

    QF_AX (Model Validation Track)

    - + cvc5 0 272 @@ -137,7 +137,7 @@

    QF_AX (Model Validation Track)

    - + SMTInterpol 0 270 @@ -148,7 +148,7 @@

    QF_AX (Model Validation Track)

    - + Yices2 0 0 @@ -170,7 +170,7 @@

    QF_AX (Model Validation Track)

    - + cvc5 0 272161.285157.5420 @@ -179,7 +179,7 @@

    QF_AX (Model Validation Track)

    - + SMTInterpol 0 270295.372145.660 @@ -188,7 +188,7 @@

    QF_AX (Model Validation Track)

    - + Yices2 0 00.00.00 @@ -212,7 +212,6 @@

    QF_AX (Model Validation Track)

diff --git a/archive/2023/results/qf-ax-proof-exhibition.html b/archive/2023/results/qf-ax-proof-exhibition.html
index c3b12f48..08954eb8 100644
--- a/archive/2023/results/qf-ax-proof-exhibition.html
+++ b/archive/2023/results/qf-ax-proof-exhibition.html
[single-line markup fixes throughout the QF_AX (Proof Exhibition Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-ax-single-query.html b/archive/2023/results/qf-ax-single-query.html
index 8bb87982..f237d486 100644
--- a/archive/2023/results/qf-ax-single-query.html
+++ b/archive/2023/results/qf-ax-single-query.html
[single-line markup fixes throughout the QF_AX (Single Query Track) results page: page header, navigation menu, and the sequential, parallel, SAT, UNSAT, and 24s results tables]
diff --git a/archive/2023/results/qf-ax-unsat-core.html b/archive/2023/results/qf-ax-unsat-core.html
index ff040964..7ee645fa 100644
--- a/archive/2023/results/qf-ax-unsat-core.html
+++ b/archive/2023/results/qf-ax-unsat-core.html
[single-line markup fixes throughout the QF_AX (Unsat Core Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bitvec-incremental.html b/archive/2023/results/qf-bitvec-incremental.html
index 3ec41775..3f74eb18 100644
--- a/archive/2023/results/qf-bitvec-incremental.html
+++ b/archive/2023/results/qf-bitvec-incremental.html
[single-line markup fixes throughout the QF_Bitvec (Incremental Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bitvec-model-validation.html b/archive/2023/results/qf-bitvec-model-validation.html
index 580da90d..4ec19972 100644
--- a/archive/2023/results/qf-bitvec-model-validation.html
+++ b/archive/2023/results/qf-bitvec-model-validation.html
[single-line markup fixes throughout the QF_Bitvec (Model Validation Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bitvec-parallel.html b/archive/2023/results/qf-bitvec-parallel.html
index 47df5c57..d54945f0 100644
--- a/archive/2023/results/qf-bitvec-parallel.html
+++ b/archive/2023/results/qf-bitvec-parallel.html
[single-line markup fixes throughout the QF_Bitvec (Parallel Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bitvec-proof-exhibition.html b/archive/2023/results/qf-bitvec-proof-exhibition.html
index 3e5f0a42..ceb176f9 100644
--- a/archive/2023/results/qf-bitvec-proof-exhibition.html
+++ b/archive/2023/results/qf-bitvec-proof-exhibition.html
[single-line markup fixes throughout the QF_Bitvec (Proof Exhibition Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bitvec-single-query.html b/archive/2023/results/qf-bitvec-single-query.html
index 7585d20f..8080b4d9 100644
--- a/archive/2023/results/qf-bitvec-single-query.html
+++ b/archive/2023/results/qf-bitvec-single-query.html
[single-line markup fixes throughout the QF_Bitvec (Single Query Track) results page: page header, navigation menu, and the sequential, parallel, SAT, UNSAT, and 24s results tables]
diff --git a/archive/2023/results/qf-bitvec-unsat-core.html b/archive/2023/results/qf-bitvec-unsat-core.html
index 2c65c695..cc5f7ebe 100644
--- a/archive/2023/results/qf-bitvec-unsat-core.html
+++ b/archive/2023/results/qf-bitvec-unsat-core.html
[single-line markup fixes throughout the QF_Bitvec (Unsat Core Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bv-incremental.html b/archive/2023/results/qf-bv-incremental.html
index 5316e6e9..2ef7f32a 100644
--- a/archive/2023/results/qf-bv-incremental.html
+++ b/archive/2023/results/qf-bv-incremental.html
[single-line markup fixes throughout the QF_BV (Incremental Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bv-model-validation.html b/archive/2023/results/qf-bv-model-validation.html
index 917fb83e..c3ea2640 100644
--- a/archive/2023/results/qf-bv-model-validation.html
+++ b/archive/2023/results/qf-bv-model-validation.html
[single-line markup fixes throughout the QF_BV (Model Validation Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bv-parallel.html b/archive/2023/results/qf-bv-parallel.html
index cadb4fc7..d8151057 100644
--- a/archive/2023/results/qf-bv-parallel.html
+++ b/archive/2023/results/qf-bv-parallel.html
[single-line markup fixes throughout the QF_BV (Parallel Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bv-proof-exhibition.html b/archive/2023/results/qf-bv-proof-exhibition.html
index aa1423fd..e32e69b5 100644
--- a/archive/2023/results/qf-bv-proof-exhibition.html
+++ b/archive/2023/results/qf-bv-proof-exhibition.html
[single-line markup fixes throughout the QF_BV (Proof Exhibition Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bv-single-query.html b/archive/2023/results/qf-bv-single-query.html
index 5fb780e7..6922fc63 100644
--- a/archive/2023/results/qf-bv-single-query.html
+++ b/archive/2023/results/qf-bv-single-query.html
[single-line markup fixes throughout the QF_BV (Single Query Track) results page: page header, navigation menu, and the sequential, parallel, SAT, UNSAT, and 24s results tables]
diff --git a/archive/2023/results/qf-bv-unsat-core.html b/archive/2023/results/qf-bv-unsat-core.html
index c30c5a35..182d3302 100644
--- a/archive/2023/results/qf-bv-unsat-core.html
+++ b/archive/2023/results/qf-bv-unsat-core.html
[single-line markup fixes throughout the QF_BV (Unsat Core Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bvfp-incremental.html b/archive/2023/results/qf-bvfp-incremental.html
index 4316266a..c3a48ca0 100644
--- a/archive/2023/results/qf-bvfp-incremental.html
+++ b/archive/2023/results/qf-bvfp-incremental.html
[single-line markup fixes throughout the QF_BVFP (Incremental Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bvfp-model-validation.html b/archive/2023/results/qf-bvfp-model-validation.html
index 479b67af..2216cb10 100644
--- a/archive/2023/results/qf-bvfp-model-validation.html
+++ b/archive/2023/results/qf-bvfp-model-validation.html
[single-line markup fixes throughout the QF_BVFP (Model Validation Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bvfp-proof-exhibition.html b/archive/2023/results/qf-bvfp-proof-exhibition.html
index 0baf4c2a..a3f0776a 100644
--- a/archive/2023/results/qf-bvfp-proof-exhibition.html
+++ b/archive/2023/results/qf-bvfp-proof-exhibition.html
[single-line markup fixes throughout the QF_BVFP (Proof Exhibition Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bvfp-single-query.html b/archive/2023/results/qf-bvfp-single-query.html
index 082f08d0..d345527e 100644
--- a/archive/2023/results/qf-bvfp-single-query.html
+++ b/archive/2023/results/qf-bvfp-single-query.html
[single-line markup fixes throughout the QF_BVFP (Single Query Track) results page: page header, navigation menu, and the sequential, parallel, SAT, UNSAT, and 24s results tables]
diff --git a/archive/2023/results/qf-bvfp-unsat-core.html b/archive/2023/results/qf-bvfp-unsat-core.html
index e8952e2b..376ccc3b 100644
--- a/archive/2023/results/qf-bvfp-unsat-core.html
+++ b/archive/2023/results/qf-bvfp-unsat-core.html
[single-line markup fixes throughout the QF_BVFP (Unsat Core Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bvfplra-incremental.html b/archive/2023/results/qf-bvfplra-incremental.html
index 07c6e968..055bca0b 100644
--- a/archive/2023/results/qf-bvfplra-incremental.html
+++ b/archive/2023/results/qf-bvfplra-incremental.html
[single-line markup fixes throughout the QF_BVFPLRA (Incremental Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bvfplra-model-validation.html b/archive/2023/results/qf-bvfplra-model-validation.html
index 2dba06d1..51e15af8 100644
--- a/archive/2023/results/qf-bvfplra-model-validation.html
+++ b/archive/2023/results/qf-bvfplra-model-validation.html
[single-line markup fixes throughout the QF_BVFPLRA (Model Validation Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bvfplra-proof-exhibition.html b/archive/2023/results/qf-bvfplra-proof-exhibition.html
index 90a54d7c..d8987b97 100644
--- a/archive/2023/results/qf-bvfplra-proof-exhibition.html
+++ b/archive/2023/results/qf-bvfplra-proof-exhibition.html
[single-line markup fixes throughout the QF_BVFPLRA (Proof Exhibition Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-bvfplra-single-query.html b/archive/2023/results/qf-bvfplra-single-query.html
index 16f5d951..061546c1 100644
--- a/archive/2023/results/qf-bvfplra-single-query.html
+++ b/archive/2023/results/qf-bvfplra-single-query.html
[single-line markup fixes throughout the QF_BVFPLRA (Single Query Track) results page: page header, navigation menu, and the sequential, parallel, SAT, UNSAT, and 24s results tables]
diff --git a/archive/2023/results/qf-bvfplra-unsat-core.html b/archive/2023/results/qf-bvfplra-unsat-core.html
index c743c240..28ec2094 100644
--- a/archive/2023/results/qf-bvfplra-unsat-core.html
+++ b/archive/2023/results/qf-bvfplra-unsat-core.html
[single-line markup fixes throughout the QF_BVFPLRA (Unsat Core Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-datatypes-model-validation.html b/archive/2023/results/qf-datatypes-model-validation.html
index 5de42675..ae4625d5 100644
--- a/archive/2023/results/qf-datatypes-model-validation.html
+++ b/archive/2023/results/qf-datatypes-model-validation.html
[single-line markup fixes throughout the QF_Datatypes (Model Validation Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-datatypes-proof-exhibition.html b/archive/2023/results/qf-datatypes-proof-exhibition.html
index c754257e..8201a693 100644
--- a/archive/2023/results/qf-datatypes-proof-exhibition.html
+++ b/archive/2023/results/qf-datatypes-proof-exhibition.html
[single-line markup fixes throughout the QF_Datatypes (Proof Exhibition Track) results page: page header, navigation menu, and results tables]
diff --git a/archive/2023/results/qf-datatypes-single-query.html b/archive/2023/results/qf-datatypes-single-query.html
index affe5c0c..dbb48a1f 100644
--- a/archive/2023/results/qf-datatypes-single-query.html
+++ b/archive/2023/results/qf-datatypes-single-query.html
[single-line markup fixes throughout the QF_Datatypes (Single Query Track) results page: page header, navigation menu, and the sequential, parallel, SAT, UNSAT, and 24s results tables]
diff --git a/archive/2023/results/qf-datatypes-unsat-core.html b/archive/2023/results/qf-datatypes-unsat-core.html
index beda3f2f..0642fcbc 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (cvc5, SMTInterpol, 2022-z3-4.8.17) of the QF_Datatypes (Unsat Core Track) results page]
diff --git a/archive/2023/results/qf-dt-model-validation.html b/archive/2023/results/qf-dt-model-validation.html
index d6d8acae..491d607a 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (SMTInterpol, cvc5) of the QF_DT (Model Validation Track) results page]
diff --git a/archive/2023/results/qf-dt-proof-exhibition.html b/archive/2023/results/qf-dt-proof-exhibition.html
index 7e27bea9..0a371c70 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (SMTInterpol, cvc5-lfsc, cvc5) of the QF_DT (Proof Exhibition Track) results page]
diff --git a/archive/2023/results/qf-dt-single-query.html b/archive/2023/results/qf-dt-single-query.html
index e3602b36..c339d2ff 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (cvc5, SMTInterpol, 2022-z3-4.8.17) of the QF_DT (Single Query Track) results page]
diff --git a/archive/2023/results/qf-dt-unsat-core.html b/archive/2023/results/qf-dt-unsat-core.html
index 0e7f2e0f..a960bbd2 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (cvc5, SMTInterpol, 2022-z3-4.8.17) of the QF_DT (Unsat Core Track) results page]
diff --git a/archive/2023/results/qf-equality-bitvec-arith-incremental.html b/archive/2023/results/qf-equality-bitvec-arith-incremental.html
index 7382702a..88849479 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (Yices2, Yices2 Fixed, cvc5) of the QF_Equality+Bitvec+Arith (Incremental Track) results page]
diff --git a/archive/2023/results/qf-equality-bitvec-incremental.html b/archive/2023/results/qf-equality-bitvec-incremental.html
index 2c40f68d..9670f049 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (Bitwuzla, Yices2, Yices2 Fixed, 2022-Yices2, cvc5) of the QF_Equality+Bitvec (Incremental Track) results page]
diff --git a/archive/2023/results/qf-equality-bitvec-model-validation.html b/archive/2023/results/qf-equality-bitvec-model-validation.html
index 3beb9edb..68047abd 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (Bitwuzla, Bitwuzla Fixed, 2022-Bitwuzla, Yices2, cvc5) of the QF_Equality+Bitvec (Model Validation Track) results page]
diff --git a/archive/2023/results/qf-equality-bitvec-proof-exhibition.html b/archive/2023/results/qf-equality-bitvec-proof-exhibition.html
index 4952de95..c40529d1 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (cvc5-lfsc, cvc5) of the QF_Equality+Bitvec (Proof Exhibition Track) results page]
diff --git a/archive/2023/results/qf-equality-bitvec-single-query.html b/archive/2023/results/qf-equality-bitvec-single-query.html
index 4c6fad78..f8d39aef 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (Bitwuzla, Bitwuzla Fixed, 2022-Bitwuzla, Yices2, cvc5, Z3-Owl, Z3-Owl Fixed, UltimateIntBlastingWrapper+SMTInterpol) of the QF_Equality+Bitvec (Single Query Track) results page]
diff --git a/archive/2023/results/qf-equality-bitvec-unsat-core.html b/archive/2023/results/qf-equality-bitvec-unsat-core.html
index 900a16b3..03b27424 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (2022-Bitwuzla, Yices2, Bitwuzla, Bitwuzla Fixed, cvc5) of the QF_Equality+Bitvec (Unsat Core Track) results page]
diff --git a/archive/2023/results/qf-equality-incremental.html b/archive/2023/results/qf-equality-incremental.html
index 5bb94105..cebac7fc 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (cvc5, SMTInterpol, Yices2, Yices2 Fixed, 2022-Yices2, OpenSMT) of the QF_Equality (Incremental Track) results page]
diff --git a/archive/2023/results/qf-equality-lineararith-incremental.html b/archive/2023/results/qf-equality-lineararith-incremental.html
index 31781a1d..f6199ca2 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (2020-z3, SMTInterpol, cvc5, Yices2, Yices2 Fixed, OpenSMT) of the QF_Equality+LinearArith (Incremental Track) results page]
diff --git a/archive/2023/results/qf-equality-lineararith-model-validation.html b/archive/2023/results/qf-equality-lineararith-model-validation.html
index 80dc0cf2..e4a39906 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (2022-smtinterpol, SMTInterpol, OpenSMT, cvc5, Yices2) of the QF_Equality+LinearArith (Model Validation Track) results page]
diff --git a/archive/2023/results/qf-equality-lineararith-proof-exhibition.html b/archive/2023/results/qf-equality-lineararith-proof-exhibition.html
index 1c09f27d..72fffb86 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (SMTInterpol, cvc5-lfsc, cvc5) of the QF_Equality+LinearArith (Proof Exhibition Track) results page]
diff --git a/archive/2023/results/qf-equality-lineararith-single-query.html b/archive/2023/results/qf-equality-lineararith-single-query.html
index 0575b03d..e8bc1e7f 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (SMTInterpol, OpenSMT, 2022-z3-4.8.17, cvc5, Yices2) of the QF_Equality+LinearArith (Single Query Track) results page]
diff --git a/archive/2023/results/qf-equality-lineararith-unsat-core.html b/archive/2023/results/qf-equality-lineararith-unsat-core.html
index ba5e131c..adf8bd50 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (2022-Yices2, Yices2, cvc5, SMTInterpol) of the QF_Equality+LinearArith (Unsat Core Track) results page]
diff --git a/archive/2023/results/qf-equality-model-validation.html b/archive/2023/results/qf-equality-model-validation.html
index 134f8240..d22071ba 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (2021-Yices2 model-validation, 2022-Yices2, Yices2, OpenSMT, cvc5, SMTInterpol) of the QF_Equality (Model Validation Track) results page]
diff --git a/archive/2023/results/qf-equality-nonlineararith-incremental.html b/archive/2023/results/qf-equality-nonlineararith-incremental.html
index fad9cf57..2f731014 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (2022-z3-4.8.17, cvc5, SMTInterpol, Yices2, Yices2 Fixed) of the QF_Equality+NonLinearArith (Incremental Track) results page]
diff --git a/archive/2023/results/qf-equality-nonlineararith-model-validation.html b/archive/2023/results/qf-equality-nonlineararith-model-validation.html
index 5602a9f3..7144d6a8 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (cvc5, Yices2) of the QF_Equality+NonLinearArith (Model Validation Track) results page]
diff --git a/archive/2023/results/qf-equality-nonlineararith-proof-exhibition.html b/archive/2023/results/qf-equality-nonlineararith-proof-exhibition.html
index 2e1463ff..b8d23dbe 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (cvc5-lfsc, cvc5, SMTInterpol) of the QF_Equality+NonLinearArith (Proof Exhibition Track) results page]
diff --git a/archive/2023/results/qf-equality-nonlineararith-single-query.html b/archive/2023/results/qf-equality-nonlineararith-single-query.html
index eb3907e5..a72a7c1f 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (cvc5, Yices2, 2020-CVC4, SMTInterpol) of the QF_Equality+NonLinearArith (Single Query Track) results page]
diff --git a/archive/2023/results/qf-equality-nonlineararith-unsat-core.html b/archive/2023/results/qf-equality-nonlineararith-unsat-core.html
index 6617dc0e..4e4be8be 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (2021-cvc5-uc, cvc5, SMTInterpol) of the QF_Equality+NonLinearArith (Unsat Core Track) results page]
diff --git a/archive/2023/results/qf-equality-proof-exhibition.html b/archive/2023/results/qf-equality-proof-exhibition.html
index 6b83b0ef..4df63f29 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (cvc5-lfsc, SMTInterpol, cvc5) of the QF_Equality (Proof Exhibition Track) results page]
diff --git a/archive/2023/results/qf-equality-single-query.html b/archive/2023/results/qf-equality-single-query.html
index 356aaf92..c951143e 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (2022-Yices2, Yices2, OpenSMT, cvc5, SMTInterpol) of the QF_Equality (Single Query Track) results page]
diff --git a/archive/2023/results/qf-equality-unsat-core.html b/archive/2023/results/qf-equality-unsat-core.html
index e69fdaca..8306f5e1 100644
[diff hunks: markup-only edits, no visible text changes, to the header, navigation menu, and result-table rows (2022-z3-4.8.17, Yices2, SMTInterpol, cvc5) of the QF_Equality (Unsat Core Track) results page]
diff --git a/archive/2023/results/qf-fp-incremental.html b/archive/2023/results/qf-fp-incremental.html
index 24a06b36..187eb4fe 100644
--- a/archive/2023/results/qf-fp-incremental.html
+++ b/archive/2023/results/qf-fp-incremental.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_FP (Incremental Track) results page]
diff --git a/archive/2023/results/qf-fp-model-validation.html b/archive/2023/results/qf-fp-model-validation.html
index 742663e1..e05ec3bb 100644
--- a/archive/2023/results/qf-fp-model-validation.html
+++ b/archive/2023/results/qf-fp-model-validation.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_FP (Model Validation Track) results page]
diff --git a/archive/2023/results/qf-fp-proof-exhibition.html b/archive/2023/results/qf-fp-proof-exhibition.html
index e07f2a2e..69b06ba7 100644
--- a/archive/2023/results/qf-fp-proof-exhibition.html
+++ b/archive/2023/results/qf-fp-proof-exhibition.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_FP (Proof Exhibition Track) results page]
diff --git a/archive/2023/results/qf-fp-single-query.html b/archive/2023/results/qf-fp-single-query.html
index 39717a2d..a219eebd 100644
--- a/archive/2023/results/qf-fp-single-query.html
+++ b/archive/2023/results/qf-fp-single-query.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_FP (Single Query Track) results page]
diff --git a/archive/2023/results/qf-fp-unsat-core.html b/archive/2023/results/qf-fp-unsat-core.html
index 94b408ad..1864d464 100644
--- a/archive/2023/results/qf-fp-unsat-core.html
+++ b/archive/2023/results/qf-fp-unsat-core.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_FP (Unsat Core Track) results page]
diff --git a/archive/2023/results/qf-fparith-incremental.html b/archive/2023/results/qf-fparith-incremental.html
index f2a3f75e..d771d3b2 100644
--- a/archive/2023/results/qf-fparith-incremental.html
+++ b/archive/2023/results/qf-fparith-incremental.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_FPArith (Incremental Track) results page]
diff --git a/archive/2023/results/qf-fparith-model-validation.html b/archive/2023/results/qf-fparith-model-validation.html
index 2460bdf7..49d75bcc 100644
--- a/archive/2023/results/qf-fparith-model-validation.html
+++ b/archive/2023/results/qf-fparith-model-validation.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_FPArith (Model Validation Track) results page]
diff --git a/archive/2023/results/qf-fparith-proof-exhibition.html b/archive/2023/results/qf-fparith-proof-exhibition.html
index 6f32da43..8a1d648c 100644
--- a/archive/2023/results/qf-fparith-proof-exhibition.html
+++ b/archive/2023/results/qf-fparith-proof-exhibition.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_FPArith (Proof Exhibition Track) results page]
diff --git a/archive/2023/results/qf-fparith-single-query.html b/archive/2023/results/qf-fparith-single-query.html
index e06eb08a..ec3ed358 100644
--- a/archive/2023/results/qf-fparith-single-query.html
+++ b/archive/2023/results/qf-fparith-single-query.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_FPArith (Single Query Track) results page]
diff --git a/archive/2023/results/qf-fparith-unsat-core.html b/archive/2023/results/qf-fparith-unsat-core.html
index 5bfe2b91..f11ff7f6 100644
--- a/archive/2023/results/qf-fparith-unsat-core.html
+++ b/archive/2023/results/qf-fparith-unsat-core.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_FPArith (Unsat Core Track) results page]
diff --git a/archive/2023/results/qf-fplra-model-validation.html b/archive/2023/results/qf-fplra-model-validation.html
index a168a463..aa2e23b4 100644
--- a/archive/2023/results/qf-fplra-model-validation.html
+++ b/archive/2023/results/qf-fplra-model-validation.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_FPLRA (Model Validation Track) results page]
diff --git a/archive/2023/results/qf-fplra-proof-exhibition.html b/archive/2023/results/qf-fplra-proof-exhibition.html
index 57614e68..8246d31f 100644
--- a/archive/2023/results/qf-fplra-proof-exhibition.html
+++ b/archive/2023/results/qf-fplra-proof-exhibition.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_FPLRA (Proof Exhibition Track) results page]
diff --git a/archive/2023/results/qf-fplra-single-query.html b/archive/2023/results/qf-fplra-single-query.html
index 66473d4a..db3258b2 100644
--- a/archive/2023/results/qf-fplra-single-query.html
+++ b/archive/2023/results/qf-fplra-single-query.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_FPLRA (Single Query Track) results page]
diff --git a/archive/2023/results/qf-idl-model-validation.html b/archive/2023/results/qf-idl-model-validation.html
index e9c8ca17..186467f7 100644
--- a/archive/2023/results/qf-idl-model-validation.html
+++ b/archive/2023/results/qf-idl-model-validation.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_IDL (Model Validation Track) results page]
diff --git a/archive/2023/results/qf-idl-proof-exhibition.html b/archive/2023/results/qf-idl-proof-exhibition.html
index fd7eee3b..842e92b5 100644
--- a/archive/2023/results/qf-idl-proof-exhibition.html
+++ b/archive/2023/results/qf-idl-proof-exhibition.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_IDL (Proof Exhibition Track) results page]
diff --git a/archive/2023/results/qf-idl-single-query.html b/archive/2023/results/qf-idl-single-query.html
index 9564fbd0..20b76d95 100644
--- a/archive/2023/results/qf-idl-single-query.html
+++ b/archive/2023/results/qf-idl-single-query.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_IDL (Single Query Track) results page]
diff --git a/archive/2023/results/qf-idl-unsat-core.html b/archive/2023/results/qf-idl-unsat-core.html
index 4a6628a5..b1ba9e95 100644
--- a/archive/2023/results/qf-idl-unsat-core.html
+++ b/archive/2023/results/qf-idl-unsat-core.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_IDL (Unsat Core Track) results page]
diff --git a/archive/2023/results/qf-lia-incremental.html b/archive/2023/results/qf-lia-incremental.html
index a7d31103..05e1c4c2 100644
--- a/archive/2023/results/qf-lia-incremental.html
+++ b/archive/2023/results/qf-lia-incremental.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_LIA (Incremental Track) results page]
diff --git a/archive/2023/results/qf-lia-model-validation.html b/archive/2023/results/qf-lia-model-validation.html
index 9326f8bc..795f9dd2 100644
--- a/archive/2023/results/qf-lia-model-validation.html
+++ b/archive/2023/results/qf-lia-model-validation.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_LIA (Model Validation Track) results page]
diff --git a/archive/2023/results/qf-lia-proof-exhibition.html b/archive/2023/results/qf-lia-proof-exhibition.html
index e4234e95..9c0ccef9 100644
--- a/archive/2023/results/qf-lia-proof-exhibition.html
+++ b/archive/2023/results/qf-lia-proof-exhibition.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_LIA (Proof Exhibition Track) results page]
diff --git a/archive/2023/results/qf-lia-single-query.html b/archive/2023/results/qf-lia-single-query.html
index e27e12e5..c48bf0f0 100644
--- a/archive/2023/results/qf-lia-single-query.html
+++ b/archive/2023/results/qf-lia-single-query.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_LIA (Single Query Track) results page]
diff --git a/archive/2023/results/qf-lia-unsat-core.html b/archive/2023/results/qf-lia-unsat-core.html
index 6a18e297..0722af54 100644
--- a/archive/2023/results/qf-lia-unsat-core.html
+++ b/archive/2023/results/qf-lia-unsat-core.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_LIA (Unsat Core Track) results page]
diff --git a/archive/2023/results/qf-linearintarith-incremental.html b/archive/2023/results/qf-linearintarith-incremental.html
index e5896ebc..f75ab81f 100644
--- a/archive/2023/results/qf-linearintarith-incremental.html
+++ b/archive/2023/results/qf-linearintarith-incremental.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_LinearIntArith (Incremental Track) results page]
diff --git a/archive/2023/results/qf-linearintarith-model-validation.html b/archive/2023/results/qf-linearintarith-model-validation.html
index fbb46af6..2134e10a 100644
--- a/archive/2023/results/qf-linearintarith-model-validation.html
+++ b/archive/2023/results/qf-linearintarith-model-validation.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_LinearIntArith (Model Validation Track) results page]
diff --git a/archive/2023/results/qf-linearintarith-proof-exhibition.html b/archive/2023/results/qf-linearintarith-proof-exhibition.html
index 857b3fe7..90be601e 100644
--- a/archive/2023/results/qf-linearintarith-proof-exhibition.html
+++ b/archive/2023/results/qf-linearintarith-proof-exhibition.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_LinearIntArith (Proof Exhibition Track) results page]
diff --git a/archive/2023/results/qf-linearintarith-single-query.html b/archive/2023/results/qf-linearintarith-single-query.html
index ee40b7ca..4b323dad 100644
--- a/archive/2023/results/qf-linearintarith-single-query.html
+++ b/archive/2023/results/qf-linearintarith-single-query.html
[diff hunks unrecoverable after extraction: repeated one-line markup edits to the QF_LinearIntArith (Single Query Track) results page]
    - + - diff --git a/archive/2023/results/qf-linearintarith-unsat-core.html b/archive/2023/results/qf-linearintarith-unsat-core.html index 20faf0dc..e4162d30 100644 --- a/archive/2023/results/qf-linearintarith-unsat-core.html +++ b/archive/2023/results/qf-linearintarith-unsat-core.html @@ -35,7 +35,7 @@

QF_LinearIntArith (Unsat Core Track)

Competition results for the QF_LinearIntArith division in the Unsat Core Track.

Winners: Sequential Performance Yices2; Parallel Performance Yices2.

Sequential ranking (errors, reduction score): Yices2 0 4898708; 2022-MathSATn 0 4788043; cvc5 0 3950610; SMTInterpol 0 3764403.
Parallel ranking (errors, reduction score): Yices2 0 4898708; 2022-MathSATn 0 4788043; cvc5 0 3950610; SMTInterpol 0 3779019.
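Unlike the instance counts elsewhere on these pages, the Unsat Core Track scores above are cumulative reduction sizes. A hedged sketch of that arithmetic, under the usual description of the track (the field names and the validation flag are illustrative assumptions, not the competition's scripts): each benchmark contributes the number of top-level assertions dropped by the solver's unsat core, counted only if the core is confirmed to remain unsatisfiable.

```python
# Hedged sketch of an Unsat Core Track reduction score; field names
# and the validation flag are assumptions for illustration only.
from dataclasses import dataclass


@dataclass
class CoreResult:
    assertions: int       # named top-level assertions in the benchmark
    core_size: int        # assertions kept in the reported unsat core
    core_validated: bool  # core re-checked as unsat by trusted solvers


def reduction_score(results: list[CoreResult]) -> int:
    # Each validated core scores the number of assertions it removed.
    return sum(r.assertions - r.core_size
               for r in results if r.core_validated)


print(reduction_score([CoreResult(100, 12, True),
                       CoreResult(50, 50, True)]))  # -> 88
```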

diff --git a/archive/2023/results/qf-linearrealarith-incremental.html b/archive/2023/results/qf-linearrealarith-incremental.html index abada08b..a7a1f7e0 100644

QF_LinearRealArith (Incremental Track)

Competition results for the QF_LinearRealArith division in the Incremental Track.

Winner: Parallel Performance OpenSMT.

Ranking (solved check-sat calls): 2018-MathSAT-incrementaln 1256; OpenSMT 953; Yices2 Fixedn 909; Yices2 906; cvc5 695; SMTInterpol 513.

diff --git a/archive/2023/results/qf-linearrealarith-model-validation.html b/archive/2023/results/qf-linearrealarith-model-validation.html index 5e888a7b..ff8838e4 100644

QF_LinearRealArith (Model Validation Track)

Competition results for the QF_LinearRealArith division in the Model Validation Track.

Winners: Sequential Performance OpenSMT; Parallel Performance OpenSMT.

Sequential and parallel ranking (errors, correct models): 2022-OpenSMTn 0 611; OpenSMT 0 609; Yices2 0 608; cvc5 0 601; SMTInterpol 0 580; Yaga 0 409.

diff --git a/archive/2023/results/qf-linearrealarith-proof-exhibition.html b/archive/2023/results/qf-linearrealarith-proof-exhibition.html index 6384d802..7184bcb0 100644

QF_LinearRealArith (Proof Exhibition Track)

Competition results for the QF_LinearRealArith division in the Proof Exhibition Track.

Ranking (errors, proofs checked): SMTInterpol 0 231; cvc5-lfsc 0 209; cvc5 0 61.

diff --git a/archive/2023/results/qf-linearrealarith-single-query.html b/archive/2023/results/qf-linearrealarith-single-query.html index c5ccd472..70099104 100644

QF_LinearRealArith (Single Query Track)

Competition results for the QF_LinearRealArith division in the Single Query Track.

Winners: Sequential Yices2; Parallel Yices2; SAT (parallel) OpenSMT; UNSAT (parallel) cvc5; 24s (parallel) Yices2.

Sequential performance (errors, solved): 2021-Yices2n 0 748; 2022-Yices2n 0 748; Yices2 0 748; OpenSMT 0 746; cvc5 0 746; SMTInterpol 0 650; Yaga 0 408.
Parallel performance (solved): 2021-Yices2n 748; 2022-Yices2n 748; Yices2 748; OpenSMT 746; cvc5 746; SMTInterpol 654; Yaga 408.
SAT performance, parallel (solved): OpenSMT 419; 2021-Yices2n 417; 2022-Yices2n 417; Yices2 417; cvc5 411; SMTInterpol 384; Yaga 243.
UNSAT performance, parallel (solved): cvc5 335; 2022-Yices2n 331; 2021-Yices2n 331; Yices2 331; OpenSMT 327; SMTInterpol 270; Yaga 165.
24s performance, parallel (solved): 2022-Yices2n 607; 2021-Yices2n 606; Yices2 606; OpenSMT 570; cvc5 514; SMTInterpol 398; Yaga 292.

diff --git a/archive/2023/results/qf-linearrealarith-unsat-core.html b/archive/2023/results/qf-linearrealarith-unsat-core.html index 58ed2e63..2d28d621 100644

QF_LinearRealArith (Unsat Core Track)

Competition results for the QF_LinearRealArith division in the Unsat Core Track.

Winners: Sequential Performance Yices2; Parallel Performance Yices2.

Sequential and parallel ranking (errors, reduction score): 2020-Yices2n 0 152335; Yices2 0 148829; cvc5 0 117667; SMTInterpol 0 95268.

diff --git a/archive/2023/results/qf-lira-model-validation.html b/archive/2023/results/qf-lira-model-validation.html index b39dfe0f..eb1cec4c 100644

QF_LIRA (Model Validation Track)

Competition results for the QF_LIRA logic in the Model Validation Track.

Winners: Sequential Performance Yices2; Parallel Performance Yices2.

Ranking (errors, correct models; cpu/wall seconds): Yices2 0 1 (0.11/0.11); cvc5 0 1 (2.712/2.719); SMTInterpol 0 1 (21.739/8.719).

diff --git a/archive/2023/results/qf-lira-proof-exhibition.html b/archive/2023/results/qf-lira-proof-exhibition.html index 920df563..98a52436 100644

QF_LIRA (Proof Exhibition Track)

Competition results for the QF_LIRA logic in the Proof Exhibition Track.

Ranking (errors, proofs checked; cpu/wall seconds): SMTInterpol 0 3 (251.717/164.308); cvc5-lfsc 0 3 (655.108/654.559); cvc5 0 0.

diff --git a/archive/2023/results/qf-lira-single-query.html b/archive/2023/results/qf-lira-single-query.html index e7e92935..d59ce5b9 100644

QF_LIRA (Single Query Track)

Competition results for the QF_LIRA logic in the Single Query Track.

Winners: Sequential Yices2; Parallel Yices2; SAT (parallel) Yices2; UNSAT (parallel) Yices2; 24s (parallel) Yices2.

Sequential performance (errors, solved): 2019-Par4n 0 7; Yices2 0 6; cvc5 0 5; SMTInterpol 0 4.
Parallel performance (solved): 2019-Par4n 7; Yices2 6; cvc5 5; SMTInterpol 4.
SAT performance, parallel (solved): Yices2 1; 2019-Par4n 1; cvc5 1; SMTInterpol 1.
UNSAT performance, parallel (solved): 2019-Par4n 6; Yices2 5; cvc5 4; SMTInterpol 3.
24s performance, parallel (solved): Yices2 5; 2019-Par4n 5; cvc5 5; SMTInterpol 3.

diff --git a/archive/2023/results/qf-lira-unsat-core.html b/archive/2023/results/qf-lira-unsat-core.html index e9134940..2b275ed4 100644

QF_LIRA (Unsat Core Track)

Competition results for the QF_LIRA logic in the Unsat Core Track.

Winners: Sequential Performance none; Parallel Performance none.

Ranking (errors, reduction score): 2022-MathSATn 0 0; cvc5 0 0; SMTInterpol 0 0; Yices2 0 0.

diff --git a/archive/2023/results/qf-lra-incremental.html b/archive/2023/results/qf-lra-incremental.html index 3cc733c9..71386916 100644

QF_LRA (Incremental Track)

Competition results for the QF_LRA logic in the Incremental Track.

Winner: Parallel Performance OpenSMT.

Ranking (solved check-sat calls): 2018-MathSAT-incrementaln 1256; OpenSMT 953; Yices2 Fixedn 909; Yices2 906; cvc5 695; SMTInterpol 513.

diff --git a/archive/2023/results/qf-lra-model-validation.html b/archive/2023/results/qf-lra-model-validation.html index 206d6c48..14016eb3 100644

QF_LRA (Model Validation Track)

Competition results for the QF_LRA logic in the Model Validation Track.

Winners: Sequential Performance OpenSMT; Parallel Performance OpenSMT.

Sequential and parallel ranking (errors, correct models): 2022-OpenSMTn 0 507; OpenSMT 0 506; Yices2 0 498; cvc5 0 494; SMTInterpol 0 478; Yaga 0 409.

diff --git a/archive/2023/results/qf-lra-proof-exhibition.html b/archive/2023/results/qf-lra-proof-exhibition.html index 52d325e5..11f3cc10 100644

QF_LRA (Proof Exhibition Track)

Competition results for the QF_LRA logic in the Proof Exhibition Track.

Ranking (errors, proofs checked): SMTInterpol 0 152; cvc5-lfsc 0 145; cvc5 0 54.

diff --git a/archive/2023/results/qf-lra-single-query.html b/archive/2023/results/qf-lra-single-query.html index 563422fe..a5babaae 100644

QF_LRA (Single Query Track)

Competition results for the QF_LRA logic in the Single Query Track.

Winners: OpenSMT in all five categories (Sequential, Parallel, SAT, UNSAT, and 24s Performance).

Sequential performance (errors, solved): OpenSMT 0 554; cvc5 0 535; 2021-Yices2n 0 534; 2022-Yices2n 0 534; Yices2 0 534; SMTInterpol 0 472; Yaga 0 408.
Parallel performance (solved): OpenSMT 554; cvc5 535; 2021-Yices2n 534; 2022-Yices2n 534; Yices2 534; SMTInterpol 475; Yaga 408.
SAT performance, parallel (solved): OpenSMT 320; 2021-Yices2n 311; 2022-Yices2n 311; Yices2 311; cvc5 308; SMTInterpol 286; Yaga 243.
UNSAT performance, parallel (solved): OpenSMT 234; cvc5 227; 2022-Yices2n 223; 2021-Yices2n 223; Yices2 223; SMTInterpol 189; Yaga 165.
24s performance, parallel (solved): OpenSMT 428; 2021-Yices2n 414; 2022-Yices2n 414; Yices2 413; cvc5 350; Yaga 292; SMTInterpol 273.

diff --git a/archive/2023/results/qf-lra-unsat-core.html b/archive/2023/results/qf-lra-unsat-core.html index cf531b9e..0fbbf41f 100644

QF_LRA (Unsat Core Track)

Competition results for the QF_LRA logic in the Unsat Core Track.

Winners: Sequential Performance Yices2; Parallel Performance Yices2.

Sequential and parallel ranking (errors, reduction score): 2020-Yices2n 0 152335; Yices2 0 148829; cvc5 0 117667; SMTInterpol 0 95268.

diff --git a/archive/2023/results/qf-nia-incremental.html b/archive/2023/results/qf-nia-incremental.html index 49a8d1cc..4ec613f3 100644

QF_NIA (Incremental Track)

Competition results for the QF_NIA logic in the Incremental Track.

Winner: Parallel Performance SMTInterpol.

Ranking (solved check-sat calls): 2021-MathSAT5n 4181666; SMTInterpol 4181657; cvc5 1739311; Yices2 Fixedn 192904; Yices2 192466.

diff --git a/archive/2023/results/qf-nia-model-validation.html b/archive/2023/results/qf-nia-model-validation.html index 645e7e9b..fc08e4e9 100644

QF_NIA (Model Validation Track)

Competition results for the QF_NIA logic in the Model Validation Track.

Winners: Sequential Performance Z3++; Parallel Performance Z3++.

Ranking (errors, correct models): Z3++ 0 7450; ismt 0 7316; Yices2 0 5724; cvc5 0 4841.

diff --git a/archive/2023/results/qf-nia-proof-exhibition.html b/archive/2023/results/qf-nia-proof-exhibition.html index a09e0688..baf8a303 100644

QF_NIA (Proof Exhibition Track)

Competition results for the QF_NIA logic in the Proof Exhibition Track.

Ranking (errors, proofs checked): cvc5-lfsc 0 1286; cvc5 0 276.

diff --git a/archive/2023/results/qf-nia-single-query.html b/archive/2023/results/qf-nia-single-query.html index 7f7935f7..5b25acaa 100644

QF_NIA (Single Query Track)

Competition results for the QF_NIA logic in the Single Query Track.

Winners: Z3++ in all five categories (Sequential, Parallel, SAT, UNSAT, and 24s Performance).

Sequential performance (errors, solved): Z3++ 0 9990; 2022-Z3++-fixedn 0 9603; yices-ismt 0 9406; z3-alpha 0 9115; cvc5 0 8040; Yices2 0 7619.
Parallel performance (solved): Z3++ 9990; 2022-Z3++-fixedn 9603; yices-ismt 9406; z3-alpha 9115; cvc5 8040; Yices2 7619.
SAT performance, parallel (solved): Z3++ 6914; yices-ismt 6807; 2022-Z3++-fixedn 6676; z3-alpha 6084; cvc5 5762; Yices2 5107.
UNSAT performance, parallel (solved): Z3++ 3076; z3-alpha 3031; 2022-Z3++-fixedn 2927; yices-ismt 2599; Yices2 2512; cvc5 2278.
24s performance, parallel (solved): Z3++ 7160; Yices2 6932; yices-ismt 6554; z3-alpha 6405; 2022-Z3++-fixedn 6193; cvc5 4230.

diff --git a/archive/2023/results/qf-nira-proof-exhibition.html b/archive/2023/results/qf-nira-proof-exhibition.html index f4fd5c79..f13d3dee 100644

QF_NIRA (Proof Exhibition Track)

Competition results for the QF_NIRA logic in the Proof Exhibition Track.

Ranking (errors, proofs checked; cpu/wall seconds): cvc5-lfsc 0 1 (0.086/0.085); cvc5 0 1 (0.108/0.107).

diff --git a/archive/2023/results/qf-nira-single-query.html b/archive/2023/results/qf-nira-single-query.html index a6a2a7fd..859114e7 100644

QF_NIRA (Single Query Track)

Competition results for the QF_NIRA logic in the Single Query Track.

Winners: Sequential cvc5; Parallel cvc5; SAT (parallel) none; UNSAT (parallel) cvc5; 24s (parallel) none.

Sequential performance (errors, solved): cvc5 0 1; Yices2 0 0.
Parallel performance (solved): cvc5 1 (237.43/237.472 s cpu/wall); Yices2 0.
SAT performance, parallel (solved): Yices2 0; cvc5 0.
UNSAT performance, parallel (solved): cvc5 1; Yices2 0.
24s performance, parallel (solved): Yices2 0; cvc5 0.

diff --git a/archive/2023/results/qf-nonlinearintarith-incremental.html b/archive/2023/results/qf-nonlinearintarith-incremental.html index fe0cc174..17064174 100644

QF_NonLinearIntArith (Incremental Track)

Competition results for the QF_NonLinearIntArith division in the Incremental Track.

Winner: Parallel Performance SMTInterpol.

Ranking (solved check-sat calls): 2021-MathSAT5n 4181666; SMTInterpol 4181657; cvc5 1739311; Yices2 Fixedn 192904; Yices2 192466.

diff --git a/archive/2023/results/qf-nonlinearintarith-model-validation.html b/archive/2023/results/qf-nonlinearintarith-model-validation.html index df4df025..1373eb12 100644

QF_NonLinearIntArith (Model Validation Track)

Competition results for the QF_NonLinearIntArith division in the Model Validation Track.

Ranking (errors, correct models): Z3++ 0 7450; ismt 0 7316; Yices2 0 5724; cvc5 0 4841.

diff --git a/archive/2023/results/qf-nonlinearintarith-proof-exhibition.html b/archive/2023/results/qf-nonlinearintarith-proof-exhibition.html index 4e3497ea..4b8d32d9 100644

QF_NonLinearIntArith (Proof Exhibition Track)

Competition results for the QF_NonLinearIntArith division in the Proof Exhibition Track.

Ranking (errors, proofs checked): cvc5-lfsc 0 1287; cvc5 0 277.

diff --git a/archive/2023/results/qf-nonlinearintarith-single-query.html b/archive/2023/results/qf-nonlinearintarith-single-query.html index 63a48bfb..9978a1f4 100644

QF_NonLinearIntArith (Single Query Track)

Competition results for the QF_NonLinearIntArith division in the Single Query Track.

Winners: Z3++ in all five categories (Sequential, Parallel, SAT, UNSAT, and 24s Performance).

Sequential performance (errors, solved): Z3++ 0 9990; 2022-Z3++-fixedn 0 9603; yices-ismt 0 9406; z3-alpha 0 9115; cvc5 0 8041; Yices2 0 7619.
Parallel performance (solved): Z3++ 9990; 2022-Z3++-fixedn 9603; yices-ismt 9406; z3-alpha 9115; cvc5 8041; Yices2 7619.
SAT performance, parallel (solved): Z3++ 6914; yices-ismt 6807; 2022-Z3++-fixedn 6676; z3-alpha 6084; cvc5 5762; Yices2 5107.
UNSAT performance, parallel (solved): Z3++ 3076; z3-alpha 3031; 2022-Z3++-fixedn 2927; yices-ismt 2599; Yices2 2512; cvc5 2279.
24s performance, parallel (solved): Z3++ 7160; Yices2 6932; yices-ismt 6554; z3-alpha 6405; 2022-Z3++-fixedn 6193; cvc5 4230.

diff --git a/archive/2023/results/qf-nonlinearrealarith-model-validation.html b/archive/2023/results/qf-nonlinearrealarith-model-validation.html index 863d8a92..a10ecd72 100644

QF_NonLinearRealArith (Model Validation Track)

Competition results for the QF_NonLinearRealArith division in the Model Validation Track.

Ranking (errors, correct models): Z3++ 0 2803; SMT-RAT-MCSAT 0 2605; Yices2 0 2172; cvc5 0 2111; cvc5-NRA-LS 0 2107.

diff --git a/archive/2023/results/qf-nonlinearrealarith-proof-exhibition.html b/archive/2023/results/qf-nonlinearrealarith-proof-exhibition.html index cfed164e..cd6db7c5 100644

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    QF_NonLinearRealArith (Proof Exhibition Track)

    Competition results for the QF_NonLinearRealArith - + division - + in the Proof Exhibition Track.

    @@ -119,7 +119,7 @@

    QF_NonLinearRealArith (Proof Exhibition Track)

    - + cvc5-lfsc 0 1506 @@ -130,7 +130,7 @@

    QF_NonLinearRealArith (Proof Exhibition Track)

    - + cvc5 0 784 @@ -152,7 +152,7 @@

    QF_NonLinearRealArith (Proof Exhibition Track)

    - + cvc5-lfsc 0 150641479.541439.76890090 @@ -161,7 +161,7 @@

    QF_NonLinearRealArith (Proof Exhibition Track)

    - + cvc5 0 7843917.2583893.268120804 @@ -185,7 +185,6 @@

    QF_NonLinearRealArith (Proof Exhibition Track)

diff --git a/archive/2023/results/qf-nonlinearrealarith-single-query.html b/archive/2023/results/qf-nonlinearrealarith-single-query.html
index 82a98d9a..29053d6d 100644
--- a/archive/2023/results/qf-nonlinearrealarith-single-query.html
+++ b/archive/2023/results/qf-nonlinearrealarith-single-query.html
[hunks garbled as above, on the QF_NonLinearRealArith (Single Query Track) division page. Recoverable context: best performers Z3++ (sequential, parallel, SAT) and cvc5 (UNSAT, 24s); sequential standings Z3++ 2540, 2019-Par4n 2525, cvc5 2461, cvc5-NRA-LS 2401, Yices2 2334, z3-alpha 2321, SMT-RAT-MCSAT 2251, 2022-Z3++-fixedn 2541 (1 erroneous answer); the per-category CPU/wall-time columns are fused beyond reliable recovery]
diff --git a/archive/2023/results/qf-nra-model-validation.html b/archive/2023/results/qf-nra-model-validation.html
index 79e12175..debf682a 100644
--- a/archive/2023/results/qf-nra-model-validation.html
+++ b/archive/2023/results/qf-nra-model-validation.html
[hunks garbled as above, on the QF_NRA (Model Validation Track) logic page. Recoverable context: Z3++ best in both sequential and parallel performance; standings Z3++ 2803, SMT-RAT-MCSAT 2605, Yices2 2172, cvc5 2111, cvc5-NRA-LS 2107]
diff --git a/archive/2023/results/qf-nra-proof-exhibition.html b/archive/2023/results/qf-nra-proof-exhibition.html
index 15bbe7ab..f3ec2c9d 100644
--- a/archive/2023/results/qf-nra-proof-exhibition.html
+++ b/archive/2023/results/qf-nra-proof-exhibition.html
[hunks garbled as above, on the QF_NRA (Proof Exhibition Track) logic page. Recoverable standings: cvc5-lfsc 1506, cvc5 784]
diff --git a/archive/2023/results/qf-nra-single-query.html b/archive/2023/results/qf-nra-single-query.html
index 5cedc79b..8b9c56f2 100644
--- a/archive/2023/results/qf-nra-single-query.html
+++ b/archive/2023/results/qf-nra-single-query.html
[hunks garbled as above, on the QF_NRA (Single Query Track) logic page. Recoverable context mirrors the QF_NonLinearRealArith division page: best performers Z3++ (sequential, parallel, SAT) and cvc5 (UNSAT, 24s); sequential standings Z3++ 2540, 2019-Par4n 2525, cvc5 2461, cvc5-NRA-LS 2401, Yices2 2334, z3-alpha 2321, SMT-RAT-MCSAT 2251, 2022-Z3++-fixedn 2541 (1 erroneous answer)]
diff --git a/archive/2023/results/qf-rdl-model-validation.html b/archive/2023/results/qf-rdl-model-validation.html
index a8f69329..cded6712 100644
--- a/archive/2023/results/qf-rdl-model-validation.html
+++ b/archive/2023/results/qf-rdl-model-validation.html
[hunks garbled as above, on the QF_RDL (Model Validation Track) logic page. Recoverable context: Yices2 best in both sequential and parallel performance; standings Yices2 110, cvc5 107, 2022-OpenSMTn 104, OpenSMT 103, SMTInterpol 102]
diff --git a/archive/2023/results/qf-rdl-proof-exhibition.html b/archive/2023/results/qf-rdl-proof-exhibition.html
index 0201aeaa..cedfeac7 100644
--- a/archive/2023/results/qf-rdl-proof-exhibition.html
+++ b/archive/2023/results/qf-rdl-proof-exhibition.html
[hunks garbled as above, on the QF_RDL (Proof Exhibition Track) logic page. Recoverable standings: SMTInterpol 79, cvc5-lfsc 64, cvc5 7]
diff --git a/archive/2023/results/qf-rdl-single-query.html b/archive/2023/results/qf-rdl-single-query.html
index 6d972519..04cc1363 100644
--- a/archive/2023/results/qf-rdl-single-query.html
+++ b/archive/2023/results/qf-rdl-single-query.html
[hunks garbled as above, on the QF_RDL (Single Query Track) logic page. Recoverable context: Yices2 best in all five performance categories; sequential standings Yices2 214, 2022-Yices2n 214, 2021-Yices2n 214, cvc5 211, OpenSMT 192, SMTInterpol 178]
diff --git a/archive/2023/results/qf-s-proof-exhibition.html b/archive/2023/results/qf-s-proof-exhibition.html
index 8a5096ee..9db1925b 100644
--- a/archive/2023/results/qf-s-proof-exhibition.html
+++ b/archive/2023/results/qf-s-proof-exhibition.html
[hunks garbled as above, on the QF_S (Proof Exhibition Track) logic page. Recoverable standings: cvc5-lfsc 1616, cvc5 1566]
diff --git a/archive/2023/results/qf-s-single-query.html b/archive/2023/results/qf-s-single-query.html
index 3eddab21..8e303661 100644
--- a/archive/2023/results/qf-s-single-query.html
+++ b/archive/2023/results/qf-s-single-query.html
[hunks garbled as above, on the QF_S (Single Query Track) logic page. Recoverable context: best performers OSTRICH (sequential, parallel, UNSAT) and z3-alpha (SAT, 24s); sequential standings OSTRICH Fixedn 8800, OSTRICH 8798, z3-alpha 8797, 2022-cvc5n 8794, cvc5 8775, Z3-Noodler 8711, Z3-Noodler Fixedn 8710]
diff --git a/archive/2023/results/qf-slia-proof-exhibition.html b/archive/2023/results/qf-slia-proof-exhibition.html
index 5018ee1f..79ed3700 100644
--- a/archive/2023/results/qf-slia-proof-exhibition.html
+++ b/archive/2023/results/qf-slia-proof-exhibition.html
[hunks garbled as above, on the QF_SLIA (Proof Exhibition Track) logic page. Recoverable standings: cvc5-lfsc 7366, cvc5 6850]
diff --git a/archive/2023/results/qf-slia-single-query.html b/archive/2023/results/qf-slia-single-query.html
index f17c7e04..e1239b5a 100644
--- a/archive/2023/results/qf-slia-single-query.html
+++ b/archive/2023/results/qf-slia-single-query.html
[hunks garbled as above, on the QF_SLIA (Single Query Track) logic page. Recoverable context: best performers cvc5 (sequential, parallel, SAT, UNSAT) and z3-alpha (24s); sequential standings cvc5 21446, 2022-cvc5n 20961, z3-alpha 20897, OSTRICH Fixedn 16916, Z3-Noodler Fixedn 12711 (1 erroneous answer), Z3-Noodler 12771 (5), OSTRICH 16802 (10)]
diff --git a/archive/2023/results/qf-snia-single-query.html b/archive/2023/results/qf-snia-single-query.html
index 1d10292b..5ac4cb12 100644
--- a/archive/2023/results/qf-snia-single-query.html
+++ b/archive/2023/results/qf-snia-single-query.html
[hunks garbled as above, on the QF_SNIA (Single Query Track) logic page. Recoverable context: z3-alpha best in sequential, parallel, SAT, and 24s performance (no UNSAT winner, as the division contains no UNSAT instances); all five entrants (z3-alpha, cvc5, 2022-cvc5n, OSTRICH, OSTRICH Fixedn) solve all 70 instances, differing only in runtime]
diff --git a/archive/2023/results/qf-strings-proof-exhibition.html b/archive/2023/results/qf-strings-proof-exhibition.html
index 167cfb58..f49d9e0c 100644
--- a/archive/2023/results/qf-strings-proof-exhibition.html
+++ b/archive/2023/results/qf-strings-proof-exhibition.html
[hunks garbled as above, on the QF_Strings (Proof Exhibition Track) division page. Recoverable standings: cvc5-lfsc 8982, cvc5 8416]
diff --git a/archive/2023/results/qf-strings-single-query.html b/archive/2023/results/qf-strings-single-query.html
index a9213177..26097b51 100644
--- a/archive/2023/results/qf-strings-single-query.html
+++ b/archive/2023/results/qf-strings-single-query.html
[hunks garbled as above, on the QF_Strings (Single Query Track) division page. Recoverable context: best performers cvc5 (sequential, parallel, SAT, UNSAT) and z3-alpha (24s); sequential standings cvc5 30291, 2022-cvc5n 29825, z3-alpha 29764, OSTRICH Fixedn 25786, Z3-Noodler Fixedn 21421 (1 erroneous answer), Z3-Noodler 21482 (5), OSTRICH 25670 (10)]
diff --git a/archive/2023/results/qf-uf-incremental.html b/archive/2023/results/qf-uf-incremental.html
index 4cd18ed6..0a8dc02f 100644
--- a/archive/2023/results/qf-uf-incremental.html
+++ b/archive/2023/results/qf-uf-incremental.html
[hunks garbled as above, on the QF_UF (Incremental Track) logic page. Recoverable context: cvc5 best parallel performance; entrants cvc5, SMTInterpol, Yices2 Fixedn, Yices2, 2022-Yices2n, OpenSMT; the solved-query and time columns are fused beyond reliable recovery]
diff --git a/archive/2023/results/qf-uf-model-validation.html b/archive/2023/results/qf-uf-model-validation.html
index d05cab00..739acc94 100644
--- a/archive/2023/results/qf-uf-model-validation.html
+++ b/archive/2023/results/qf-uf-model-validation.html
[hunks garbled as above, on the QF_UF (Model Validation Track) logic page. Recoverable context: Yices2 best in both sequential and parallel performance; all six entrants (2021-Yices2 model-validationn, 2022-Yices2n, Yices2, OpenSMT, cvc5, SMTInterpol) solve 1571 instances, differing only in runtime]
diff --git a/archive/2023/results/qf-uf-proof-exhibition.html b/archive/2023/results/qf-uf-proof-exhibition.html
index e405fd10..f4428e15 100644
--- a/archive/2023/results/qf-uf-proof-exhibition.html
+++ b/archive/2023/results/qf-uf-proof-exhibition.html
[hunks garbled as above, on the QF_UF (Proof Exhibition Track) logic page. Recoverable standings: cvc5-lfsc 1051, SMTInterpol 1045, cvc5 60]
diff --git a/archive/2023/results/qf-uf-single-query.html b/archive/2023/results/qf-uf-single-query.html
index d4699df8..2ca5a25a 100644
--- a/archive/2023/results/qf-uf-single-query.html
+++ b/archive/2023/results/qf-uf-single-query.html
[hunks garbled as above, on the QF_UF (Single Query Track) logic page. Recoverable context: Yices2 best in all five performance categories; sequential standings 2022-Yices2n 3478, Yices2 3478, OpenSMT 3478, cvc5 3476, SMTInterpol 3407]
diff --git a/archive/2023/results/qf-uf-unsat-core.html b/archive/2023/results/qf-uf-unsat-core.html
index 4afc05ac..66aa78d6 100644
--- a/archive/2023/results/qf-uf-unsat-core.html
+++ b/archive/2023/results/qf-uf-unsat-core.html
[hunks garbled as above, on the QF_UF (Unsat Core Track) logic page. Recoverable context: Yices2 best sequential, SMTInterpol best parallel performance; reduction-score standings 2022-z3-4.8.17n 300004, Yices2 298721, SMTInterpol 295571, cvc5 218164]
diff --git a/archive/2023/results/qf-ufbv-incremental.html b/archive/2023/results/qf-ufbv-incremental.html
index c2d1e605..e83e1584 100644
--- a/archive/2023/results/qf-ufbv-incremental.html
+++ b/archive/2023/results/qf-ufbv-incremental.html
[hunks garbled as above, on the QF_UFBV (Incremental Track) logic page. Recoverable context: Bitwuzla best parallel performance; apparent solved-call counts Bitwuzla 2996, Yices2 2994, Yices2 Fixedn 2994, 2022-Yices2n 2994, cvc5 2809]
diff --git a/archive/2023/results/qf-ufbv-model-validation.html b/archive/2023/results/qf-ufbv-model-validation.html
index 555bbc7f..aa9ec963 100644
--- a/archive/2023/results/qf-ufbv-model-validation.html
+++ b/archive/2023/results/qf-ufbv-model-validation.html
[hunks garbled as above, on the QF_UFBV (Model Validation Track) logic page. Recoverable context: Bitwuzla best in both sequential and parallel performance; standings Bitwuzla 411, Bitwuzla Fixedn 411, 2022-Bitwuzlan 406, Yices2 401, cvc5 388]
diff --git a/archive/2023/results/qf-ufbv-proof-exhibition.html b/archive/2023/results/qf-ufbv-proof-exhibition.html
index c62b2111..42b2a80c 100644
--- a/archive/2023/results/qf-ufbv-proof-exhibition.html
+++ b/archive/2023/results/qf-ufbv-proof-exhibition.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFBV (Proof Exhibition Track). Ranking: cvc5-lfsc 69, cvc5 3.]
diff --git a/archive/2023/results/qf-ufbv-single-query.html b/archive/2023/results/qf-ufbv-single-query.html
index 3f2a7d30..3274515b 100644
--- a/archive/2023/results/qf-ufbv-single-query.html
+++ b/archive/2023/results/qf-ufbv-single-query.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFBV (Single Query Track). Winner in all five categories (Sequential, Parallel, SAT, UNSAT, 24s Performance): Bitwuzla. Ranking: 2022-Bitwuzlaⁿ 287, Bitwuzla Fixedⁿ 284, Bitwuzla 284, Yices2 248, cvc5 246, Z3-Owl Fixedⁿ 63, UltimateIntBlastingWrapper+SMTInterpol 8, Z3-Owl 217 (4 errors). The SAT/UNSAT/24s breakdown tables repeat these solvers with time columns fused in the source.]
diff --git a/archive/2023/results/qf-ufbv-unsat-core.html b/archive/2023/results/qf-ufbv-unsat-core.html
index 3d5f1159..d8255eb7 100644
--- a/archive/2023/results/qf-ufbv-unsat-core.html
+++ b/archive/2023/results/qf-ufbv-unsat-core.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFBV (Unsat Core Track). Sequential and Parallel Performance winner: Yices2. Ranking: 2022-Bitwuzlaⁿ 827563, Yices2 624070, Bitwuzla Fixedⁿ 224917, Bitwuzla 224917, cvc5 3655.]
diff --git a/archive/2023/results/qf-ufbvdt-proof-exhibition.html b/archive/2023/results/qf-ufbvdt-proof-exhibition.html
index 8e2f192c..8c1706e2 100644
--- a/archive/2023/results/qf-ufbvdt-proof-exhibition.html
+++ b/archive/2023/results/qf-ufbvdt-proof-exhibition.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFBVDT (Proof Exhibition Track). Ranking: cvc5-lfsc 3, cvc5 0.]
diff --git a/archive/2023/results/qf-ufbvdt-single-query.html b/archive/2023/results/qf-ufbvdt-single-query.html
index 97182b3a..c1fd0d24 100644
--- a/archive/2023/results/qf-ufbvdt-single-query.html
+++ b/archive/2023/results/qf-ufbvdt-single-query.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFBVDT (Single Query Track). Winner in all five categories (Sequential, Parallel, SAT, UNSAT, 24s Performance): cvc5. Ranking: cvc5 49, UltimateIntBlastingWrapper+SMTInterpol 0.]
diff --git a/archive/2023/results/qf-ufbvlia-incremental.html b/archive/2023/results/qf-ufbvlia-incremental.html
index e3430b14..8bc28993 100644
--- a/archive/2023/results/qf-ufbvlia-incremental.html
+++ b/archive/2023/results/qf-ufbvlia-incremental.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFBVLIA (Incremental Track). Parallel Performance winner: Yices2. Ranking: Yices2 Fixedⁿ, Yices2, cvc5 (time columns fused in the source).]
diff --git a/archive/2023/results/qf-ufdt-proof-exhibition.html b/archive/2023/results/qf-ufdt-proof-exhibition.html
index f26f29da..fd513d65 100644
--- a/archive/2023/results/qf-ufdt-proof-exhibition.html
+++ b/archive/2023/results/qf-ufdt-proof-exhibition.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFDT (Proof Exhibition Track). Ranking: SMTInterpol 18, cvc5 0, cvc5-lfsc 0.]
diff --git a/archive/2023/results/qf-ufdt-single-query.html b/archive/2023/results/qf-ufdt-single-query.html
index 5dfd3554..fd81bc69 100644
--- a/archive/2023/results/qf-ufdt-single-query.html
+++ b/archive/2023/results/qf-ufdt-single-query.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFDT (Single Query Track). Winners: cvc5 (Sequential, Parallel, SAT, UNSAT Performance), SMTInterpol (24s Performance). Ranking: 2022-z3-4.8.17ⁿ 98, cvc5 96, SMTInterpol 38.]
diff --git a/archive/2023/results/qf-ufdt-unsat-core.html b/archive/2023/results/qf-ufdt-unsat-core.html
index eadb118f..72d72f9d 100644
--- a/archive/2023/results/qf-ufdt-unsat-core.html
+++ b/archive/2023/results/qf-ufdt-unsat-core.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFDT (Unsat Core Track). Sequential and Parallel Performance winner: SMTInterpol. Ranking: 2022-z3-4.8.17ⁿ 679843, SMTInterpol 111642, cvc5 101480.]
diff --git a/archive/2023/results/qf-ufdtlia-proof-exhibition.html b/archive/2023/results/qf-ufdtlia-proof-exhibition.html
index 34da9941..36e6b23c 100644
--- a/archive/2023/results/qf-ufdtlia-proof-exhibition.html
+++ b/archive/2023/results/qf-ufdtlia-proof-exhibition.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFDTLIA (Proof Exhibition Track). Ranking: SMTInterpol 10, cvc5-lfsc 6, cvc5 0.]
diff --git a/archive/2023/results/qf-ufdtlia-single-query.html b/archive/2023/results/qf-ufdtlia-single-query.html
index b5a76187..c15ea58b 100644
--- a/archive/2023/results/qf-ufdtlia-single-query.html
+++ b/archive/2023/results/qf-ufdtlia-single-query.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFDTLIA (Single Query Track). Winner in all five categories (Sequential, Parallel, SAT, UNSAT, 24s Performance): SMTInterpol. Ranking: SMTInterpol 56, cvc5 39.]
diff --git a/archive/2023/results/qf-ufdtlia-unsat-core.html b/archive/2023/results/qf-ufdtlia-unsat-core.html
index 94a30294..d757b9cb 100644
--- a/archive/2023/results/qf-ufdtlia-unsat-core.html
+++ b/archive/2023/results/qf-ufdtlia-unsat-core.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFDTLIA (Unsat Core Track). Sequential and Parallel Performance winner: SMTInterpol. Ranking: SMTInterpol 15442, cvc5 8964.]
diff --git a/archive/2023/results/qf-ufdtlira-proof-exhibition.html b/archive/2023/results/qf-ufdtlira-proof-exhibition.html
index 884391eb..b2694ca6 100644
--- a/archive/2023/results/qf-ufdtlira-proof-exhibition.html
+++ b/archive/2023/results/qf-ufdtlira-proof-exhibition.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFDTLIRA (Proof Exhibition Track). Ranking: cvc5 66, cvc5-lfsc 66, SMTInterpol 66.]
diff --git a/archive/2023/results/qf-ufdtlira-single-query.html b/archive/2023/results/qf-ufdtlira-single-query.html
index 203a8ed3..f228f403 100644
--- a/archive/2023/results/qf-ufdtlira-single-query.html
+++ b/archive/2023/results/qf-ufdtlira-single-query.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFDTLIRA (Single Query Track). Winner in all five categories (Sequential, Parallel, SAT, UNSAT, 24s Performance): cvc5. Ranking: cvc5 9, 2022-z3-4.8.17ⁿ 9, SMTInterpol 9.]
diff --git a/archive/2023/results/qf-ufdtlira-unsat-core.html b/archive/2023/results/qf-ufdtlira-unsat-core.html
index 32866236..bb286a02 100644
--- a/archive/2023/results/qf-ufdtlira-unsat-core.html
+++ b/archive/2023/results/qf-ufdtlira-unsat-core.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFDTLIRA (Unsat Core Track). Sequential and Parallel Performance winner: cvc5. Ranking: cvc5 162, SMTInterpol 162.]
diff --git a/archive/2023/results/qf-uffp-incremental.html b/archive/2023/results/qf-uffp-incremental.html
index 910540b9..34045dc5 100644
--- a/archive/2023/results/qf-uffp-incremental.html
+++ b/archive/2023/results/qf-uffp-incremental.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFFP (Incremental Track). Parallel Performance winner: Bitwuzla. Ranking: 2022-Bitwuzlaⁿ, Bitwuzla, cvc5 (time columns fused in the source).]
diff --git a/archive/2023/results/qf-uffp-proof-exhibition.html b/archive/2023/results/qf-uffp-proof-exhibition.html
index a69c02d3..3449715e 100644
--- a/archive/2023/results/qf-uffp-proof-exhibition.html
+++ b/archive/2023/results/qf-uffp-proof-exhibition.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFFP (Proof Exhibition Track). Ranking: cvc5 0, cvc5-lfsc 0.]
diff --git a/archive/2023/results/qf-uffpdtnira-proof-exhibition.html b/archive/2023/results/qf-uffpdtnira-proof-exhibition.html
index c0b5f30b..9b1cb63e 100644
--- a/archive/2023/results/qf-uffpdtnira-proof-exhibition.html
+++ b/archive/2023/results/qf-uffpdtnira-proof-exhibition.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFFPDTNIRA (Proof Exhibition Track). Ranking: cvc5-lfsc 107, cvc5 47.]
diff --git a/archive/2023/results/qf-ufidl-model-validation.html b/archive/2023/results/qf-ufidl-model-validation.html
index d132efe4..97b92210 100644
--- a/archive/2023/results/qf-ufidl-model-validation.html
+++ b/archive/2023/results/qf-ufidl-model-validation.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFIDL (Model Validation Track). Sequential and Parallel Performance winner: OpenSMT. Ranking: OpenSMT 199, 2022-smtinterpolⁿ 197, SMTInterpol 182, cvc5 169, Yices2 141.]
diff --git a/archive/2023/results/qf-ufidl-proof-exhibition.html b/archive/2023/results/qf-ufidl-proof-exhibition.html
index b2a44617..4b935c84 100644
--- a/archive/2023/results/qf-ufidl-proof-exhibition.html
+++ b/archive/2023/results/qf-ufidl-proof-exhibition.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFIDL (Proof Exhibition Track). Ranking: SMTInterpol 98, cvc5-lfsc 80, cvc5 6.]
diff --git a/archive/2023/results/qf-ufidl-single-query.html b/archive/2023/results/qf-ufidl-single-query.html
index bea7f23f..9ab5d851 100644
--- a/archive/2023/results/qf-ufidl-single-query.html
+++ b/archive/2023/results/qf-ufidl-single-query.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFIDL (Single Query Track). Winners: OpenSMT (Sequential, Parallel, SAT, UNSAT Performance), Yices2 (24s Performance). Ranking: OpenSMT 277, 2022-z3-4.8.17ⁿ 259, cvc5 238, Yices2 236, SMTInterpol 224.]
diff --git a/archive/2023/results/qf-ufidl-unsat-core.html b/archive/2023/results/qf-ufidl-unsat-core.html
index 6390c333..b1a312be 100644
--- a/archive/2023/results/qf-ufidl-unsat-core.html
+++ b/archive/2023/results/qf-ufidl-unsat-core.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFIDL (Unsat Core Track). Sequential and Parallel Performance winner: Yices2. Ranking: 2022-Yices2ⁿ 675342, Yices2 631468, cvc5 62360, SMTInterpol 48510.]
diff --git a/archive/2023/results/qf-uflia-incremental.html b/archive/2023/results/qf-uflia-incremental.html
index 062f06e9..63157faa 100644
--- a/archive/2023/results/qf-uflia-incremental.html
+++ b/archive/2023/results/qf-uflia-incremental.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFLIA (Incremental Track). Parallel Performance winner: Yices2. Ranking: 2020-z3ⁿ, Yices2 Fixedⁿ, Yices2, SMTInterpol, cvc5, OpenSMT (count and time columns fused in the source).]
diff --git a/archive/2023/results/qf-uflia-model-validation.html b/archive/2023/results/qf-uflia-model-validation.html
index 53a12a04..7224609d 100644
--- a/archive/2023/results/qf-uflia-model-validation.html
+++ b/archive/2023/results/qf-uflia-model-validation.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFLIA (Model Validation Track). Sequential and Parallel Performance winner: SMTInterpol. Ranking: SMTInterpol 330, 2022-smtinterpolⁿ 329, Yices2 320, cvc5 319, OpenSMT 316.]
diff --git a/archive/2023/results/qf-uflia-proof-exhibition.html b/archive/2023/results/qf-uflia-proof-exhibition.html
index 1ae446ff..66facb20 100644
--- a/archive/2023/results/qf-uflia-proof-exhibition.html
+++ b/archive/2023/results/qf-uflia-proof-exhibition.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFLIA (Proof Exhibition Track). Ranking: SMTInterpol 158, cvc5-lfsc 153, cvc5 79.]
diff --git a/archive/2023/results/qf-uflia-single-query.html b/archive/2023/results/qf-uflia-single-query.html
index e69cc76e..1024537d 100644
--- a/archive/2023/results/qf-uflia-single-query.html
+++ b/archive/2023/results/qf-uflia-single-query.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFLIA (Single Query Track). Winners: SMTInterpol (Sequential, Parallel, SAT Performance), Yices2 (UNSAT and 24s Performance). Ranking: SMTInterpol 290, Yices2 280, 2022-z3-4.8.17ⁿ 275, cvc5 275, OpenSMT 271.]
diff --git a/archive/2023/results/qf-uflia-unsat-core.html b/archive/2023/results/qf-uflia-unsat-core.html
index 6e834223..c16462aa 100644
--- a/archive/2023/results/qf-uflia-unsat-core.html
+++ b/archive/2023/results/qf-uflia-unsat-core.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFLIA (Unsat Core Track). Sequential and Parallel Performance winner: Yices2. Ranking: Yices2 398048, 2022-Yices2ⁿ 398048, cvc5 364807, SMTInterpol 250228.]
diff --git a/archive/2023/results/qf-uflra-incremental.html b/archive/2023/results/qf-uflra-incremental.html
index 647583ed..3e1f6cef 100644
--- a/archive/2023/results/qf-uflra-incremental.html
+++ b/archive/2023/results/qf-uflra-incremental.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFLRA (Incremental Track). Parallel Performance winner: Yices2. Ranking: 2020-z3ⁿ, Yices2 Fixedⁿ, Yices2, cvc5, OpenSMT, SMTInterpol (count and time columns fused in the source).]
diff --git a/archive/2023/results/qf-uflra-model-validation.html b/archive/2023/results/qf-uflra-model-validation.html
index aa05f755..94c12271 100644
--- a/archive/2023/results/qf-uflra-model-validation.html
+++ b/archive/2023/results/qf-uflra-model-validation.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFLRA (Model Validation Track). Sequential and Parallel Performance winner: Yices2. Ranking: Yices2 383, SMTInterpol 383, 2022-smtinterpolⁿ 383, cvc5 382, OpenSMT 378.]
diff --git a/archive/2023/results/qf-uflra-proof-exhibition.html b/archive/2023/results/qf-uflra-proof-exhibition.html
index f31af59f..2ef53368 100644
--- a/archive/2023/results/qf-uflra-proof-exhibition.html
+++ b/archive/2023/results/qf-uflra-proof-exhibition.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFLRA (Proof Exhibition Track). Ranking: SMTInterpol 148, cvc5-lfsc 146, cvc5 10.]
diff --git a/archive/2023/results/qf-uflra-single-query.html b/archive/2023/results/qf-uflra-single-query.html
index 8125d604..3a80e73b 100644
--- a/archive/2023/results/qf-uflra-single-query.html
+++ b/archive/2023/results/qf-uflra-single-query.html
[Hunks whose changed +/- lines were stripped in extraction; they fall on the logo, navigation, and result-table markup. Page: QF_UFLRA (Single Query Track). Winner in all five categories (Sequential, Parallel, SAT, UNSAT, 24s Performance): Yices2. Ranking: Yices2 537, SMTInterpol 537, cvc5 535, OpenSMT 532, 2022-z3-4.8.17ⁿ 531.]
diff --git a/archive/2023/results/qf-uflra-unsat-core.html b/archive/2023/results/qf-uflra-unsat-core.html
index fefbb3ec..f29c80dd 100644
--- a/archive/2023/results/qf-uflra-unsat-core.html
+++ b/archive/2023/results/qf-uflra-unsat-core.html
[QF_UFLRA (Unsat Core Track): markup-only hunks over the page header, navigation, and result tables (cvc5, 2022-Yices2n, Yices2, SMTInterpol).]
diff --git a/archive/2023/results/qf-ufnia-incremental.html b/archive/2023/results/qf-ufnia-incremental.html
index fc1ce9fc..de06b231 100644
--- a/archive/2023/results/qf-ufnia-incremental.html
+++ b/archive/2023/results/qf-ufnia-incremental.html
[QF_UFNIA (Incremental Track): markup-only hunks over the page header, navigation, and result tables (2022-z3-4.8.17n, cvc5, Yices2 Fixedn, Yices2, SMTInterpol).]
diff --git a/archive/2023/results/qf-ufnia-model-validation.html b/archive/2023/results/qf-ufnia-model-validation.html
index f08c04b0..54f8965a 100644
--- a/archive/2023/results/qf-ufnia-model-validation.html
+++ b/archive/2023/results/qf-ufnia-model-validation.html
[QF_UFNIA (Model Validation Track): markup-only hunks over the page header, navigation, and result tables (cvc5, Yices2).]
diff --git a/archive/2023/results/qf-ufnia-proof-exhibition.html b/archive/2023/results/qf-ufnia-proof-exhibition.html
index cf80b164..688f253f 100644
--- a/archive/2023/results/qf-ufnia-proof-exhibition.html
+++ b/archive/2023/results/qf-ufnia-proof-exhibition.html
[QF_UFNIA (Proof Exhibition Track): markup-only hunks over the page header, navigation, and result tables (cvc5-lfsc, cvc5, SMTInterpol).]
diff --git a/archive/2023/results/qf-ufnia-single-query.html b/archive/2023/results/qf-ufnia-single-query.html
index a2d07b47..671e9665 100644
--- a/archive/2023/results/qf-ufnia-single-query.html
+++ b/archive/2023/results/qf-ufnia-single-query.html
[QF_UFNIA (Single Query Track): markup-only hunks over the page header, navigation, and result tables (cvc5, 2020-CVC4n, Yices2, SMTInterpol).]
diff --git a/archive/2023/results/qf-ufnia-unsat-core.html b/archive/2023/results/qf-ufnia-unsat-core.html
index 8ef17b2d..a1ab3816 100644
--- a/archive/2023/results/qf-ufnia-unsat-core.html
+++ b/archive/2023/results/qf-ufnia-unsat-core.html
[QF_UFNIA (Unsat Core Track): markup-only hunks over the page header, navigation, and result tables (2021-cvc5-ucn, cvc5, SMTInterpol).]
diff --git a/archive/2023/results/qf-ufnra-incremental.html b/archive/2023/results/qf-ufnra-incremental.html
index 0076c89f..e505682c 100644
--- a/archive/2023/results/qf-ufnra-incremental.html
+++ b/archive/2023/results/qf-ufnra-incremental.html
[QF_UFNRA (Incremental Track): markup-only hunks over the page header, navigation, and result tables (Yices2, Yices2 Fixedn, 2022-z3-4.8.17n, cvc5, SMTInterpol).]
diff --git a/archive/2023/results/qf-ufnra-model-validation.html b/archive/2023/results/qf-ufnra-model-validation.html
index 8510fad4..9de14ebf 100644
--- a/archive/2023/results/qf-ufnra-model-validation.html
+++ b/archive/2023/results/qf-ufnra-model-validation.html
[QF_UFNRA (Model Validation Track): markup-only hunks over the page header, navigation, and result tables (Yices2, cvc5).]
diff --git a/archive/2023/results/qf-ufnra-proof-exhibition.html b/archive/2023/results/qf-ufnra-proof-exhibition.html
index 3afcfc8d..96ea4c4f 100644
--- a/archive/2023/results/qf-ufnra-proof-exhibition.html
+++ b/archive/2023/results/qf-ufnra-proof-exhibition.html
[QF_UFNRA (Proof Exhibition Track): markup-only hunks over the page header, navigation, and result tables (cvc5-lfsc, cvc5, SMTInterpol).]
diff --git a/archive/2023/results/qf-ufnra-single-query.html b/archive/2023/results/qf-ufnra-single-query.html
index 1579f8b1..82fce01a 100644
--- a/archive/2023/results/qf-ufnra-single-query.html
+++ b/archive/2023/results/qf-ufnra-single-query.html
[QF_UFNRA (Single Query Track): markup-only hunks over the page header, navigation, and result tables (Yices2, cvc5, 2020-CVC4n, SMTInterpol).]
diff --git a/archive/2023/results/qf-ufnra-unsat-core.html b/archive/2023/results/qf-ufnra-unsat-core.html
index b41a1dc9..df8f7e96 100644
--- a/archive/2023/results/qf-ufnra-unsat-core.html
+++ b/archive/2023/results/qf-ufnra-unsat-core.html
[QF_UFNRA (Unsat Core Track): markup-only hunks over the page header, navigation, and result tables (2021-cvc5-ucn, cvc5, SMTInterpol).]
diff --git a/archive/2023/results/results-cloud.html b/archive/2023/results/results-cloud.html
index eeb9811a..ad13727d 100644
--- a/archive/2023/results/results-cloud.html
+++ b/archive/2023/results/results-cloud.html
[SMT-COMP 2023 Results - Cloud Track (Summary): markup-only hunks over the page header and navigation.]
diff --git a/archive/2023/results/results-incremental.html b/archive/2023/results/results-incremental.html
index 226f1153..daabe983 100644
--- a/archive/2023/results/results-incremental.html
+++ b/archive/2023/results/results-incremental.html
[SMT-COMP 2023 Results - Incremental Track (Summary): markup-only hunks over the page header and navigation.]
diff --git a/archive/2023/results/results-model-validation.html b/archive/2023/results/results-model-validation.html
index 90076506..a2e58de4 100644
--- a/archive/2023/results/results-model-validation.html
+++ b/archive/2023/results/results-model-validation.html
[SMT-COMP 2023 Results - Model Validation Track (Summary): markup-only hunks over the page header and navigation.]
diff --git a/archive/2023/results/results-parallel.html b/archive/2023/results/results-parallel.html
index b8862cc6..48ab28ab 100644
--- a/archive/2023/results/results-parallel.html
+++ b/archive/2023/results/results-parallel.html
[SMT-COMP 2023 Results - Parallel Track (Summary): markup-only hunks over the page header and navigation.]
diff --git a/archive/2023/results/results-single-query.html b/archive/2023/results/results-single-query.html
index b86cc62f..5e362463 100644
--- a/archive/2023/results/results-single-query.html
+++ b/archive/2023/results/results-single-query.html
[SMT-COMP 2023 Results - Single Query Track (Summary): markup-only hunks over the page header and navigation.]
diff --git a/archive/2023/results/results-unsat-core.html b/archive/2023/results/results-unsat-core.html
index 6b577a4c..14013737 100644
--- a/archive/2023/results/results-unsat-core.html
+++ b/archive/2023/results/results-unsat-core.html
[SMT-COMP 2023 Results - Unsat Core Track (Summary): markup-only hunks over the page header and navigation.]
diff --git a/archive/2023/results/uf-cloud.html b/archive/2023/results/uf-cloud.html
index 0f61abcd..1e71f97f 100644
--- a/archive/2023/results/uf-cloud.html
+++ b/archive/2023/results/uf-cloud.html
[UF (Cloud Track): markup-only hunks over the page header, navigation, and result tables (cvc5, Vampire).]
diff --git a/archive/2023/results/uf-incremental.html b/archive/2023/results/uf-incremental.html
index e2b90bdd..404d6b00 100644
--- a/archive/2023/results/uf-incremental.html
+++ b/archive/2023/results/uf-incremental.html
[UF (Incremental Track): markup-only hunks over the page header, navigation, and result tables (2020-z3n, cvc5, SMTInterpol, UltimateEliminator+MathSAT, Yices2, Yices2 Fixedn).]
diff --git a/archive/2023/results/uf-parallel.html b/archive/2023/results/uf-parallel.html
index 61493bb5..cd6180b1 100644
--- a/archive/2023/results/uf-parallel.html
+++ b/archive/2023/results/uf-parallel.html
[UF (Parallel Track): markup-only hunks over the page header, navigation, and result tables (iProver, Vampire).]
diff --git a/archive/2023/results/uf-proof-exhibition.html b/archive/2023/results/uf-proof-exhibition.html
index a52ecea9..f510336e 100644
--- a/archive/2023/results/uf-proof-exhibition.html
+++ b/archive/2023/results/uf-proof-exhibition.html
[UF (Proof Exhibition Track): markup-only hunks over the page header, navigation, and result tables (cvc5-lfsc, cvc5, SMTInterpol).]
diff --git a/archive/2023/results/uf-single-query.html b/archive/2023/results/uf-single-query.html
index 41db6c8a..a5b6c5f1 100644
--- a/archive/2023/results/uf-single-query.html
+++ b/archive/2023/results/uf-single-query.html
[UF (Single Query Track): markup-only hunks over the page header, navigation, and result tables (2022-cvc5n, Vampire, cvc5, iProver Fixedn, iProver, Yices2, SMTInterpol, UltimateEliminator+MathSAT).]
diff --git a/archive/2023/results/uf-unsat-core.html b/archive/2023/results/uf-unsat-core.html
index 963be9d1..bed6b2db 100644
--- a/archive/2023/results/uf-unsat-core.html
+++ b/archive/2023/results/uf-unsat-core.html
[UF (Unsat Core Track): markup-only hunks over the page header, navigation, and result tables (cvc5, 2020-CVC4-ucn, 2022-Vampiren, Vampire, SMTInterpol, Yices2, UltimateEliminator+MathSAT).]
diff --git a/archive/2023/results/ufbv-proof-exhibition.html b/archive/2023/results/ufbv-proof-exhibition.html
index 5833434e..ae121934 100644
--- a/archive/2023/results/ufbv-proof-exhibition.html
+++ b/archive/2023/results/ufbv-proof-exhibition.html
[UFBV (Proof Exhibition Track): markup-only hunks over the page header, navigation, and result tables (cvc5-lfsc, cvc5).]
diff --git a/archive/2023/results/ufbv-single-query.html b/archive/2023/results/ufbv-single-query.html
index 00856c2b..a35f788d 100644
--- a/archive/2023/results/ufbv-single-query.html
+++ b/archive/2023/results/ufbv-single-query.html
[UFBV (Single Query Track): markup-only hunks over the page header, navigation, and result tables (2022-z3-4.8.17n, Bitwuzla Fixedn, Bitwuzla, cvc5, UltimateEliminator+MathSAT, UltimateIntBlastingWrapper+SMTInterpol).]
diff --git a/archive/2023/results/ufbvdt-single-query.html b/archive/2023/results/ufbvdt-single-query.html
index 74add0a8..9ec6504b 100644
--- a/archive/2023/results/ufbvdt-single-query.html
+++ b/archive/2023/results/ufbvdt-single-query.html
[UFBVDT (Single Query Track): markup-only hunks over the page header, navigation, and result tables (cvc5, UltimateIntBlastingWrapper+SMTInterpol).]
diff --git a/archive/2023/results/ufbvfp-proof-exhibition.html b/archive/2023/results/ufbvfp-proof-exhibition.html
index ad821876..e7a332b7 100644
--- a/archive/2023/results/ufbvfp-proof-exhibition.html
+++ b/archive/2023/results/ufbvfp-proof-exhibition.html
[UFBVFP (Proof Exhibition Track): markup-only hunks over the page header, navigation, and result tables (cvc5-lfsc, cvc5).]
diff --git a/archive/2023/results/ufbvfp-single-query.html b/archive/2023/results/ufbvfp-single-query.html
index 8f57fb0f..f64a1bf5 100644
--- a/archive/2023/results/ufbvfp-single-query.html
+++ b/archive/2023/results/ufbvfp-single-query.html
[UFBVFP (Single Query Track): markup-only hunks over the page header, navigation, and result tables (Bitwuzla Fixedn, Bitwuzla, cvc5, 2022-z3-4.8.17n, UltimateEliminator+MathSAT).]
diff --git a/archive/2023/results/ufbvlia-proof-exhibition.html b/archive/2023/results/ufbvlia-proof-exhibition.html
index 7ad86968..8b7ea3d8 100644
--- a/archive/2023/results/ufbvlia-proof-exhibition.html
+++ b/archive/2023/results/ufbvlia-proof-exhibition.html
[UFBVLIA (Proof Exhibition Track): markup-only hunks over the page header, navigation, and result tables (cvc5, cvc5-lfsc).]
diff --git a/archive/2023/results/ufbvlia-single-query.html b/archive/2023/results/ufbvlia-single-query.html
index 4085c085..ec9a7fa8 100644
--- a/archive/2023/results/ufbvlia-single-query.html
+++ b/archive/2023/results/ufbvlia-single-query.html
[UFBVLIA (Single Query Track): markup-only hunks over the page header, navigation, and result tables (2022-z3-4.8.17n, cvc5, UltimateIntBlastingWrapper+SMTInterpol, UltimateEliminator+MathSAT).]
diff --git a/archive/2023/results/ufdt-cloud.html b/archive/2023/results/ufdt-cloud.html
index b942d05c..0c641bab 100644
--- a/archive/2023/results/ufdt-cloud.html
+++ b/archive/2023/results/ufdt-cloud.html
[UFDT (Cloud Track): markup-only hunks over the page header, navigation, and result tables (cvc5, Vampire).]
diff --git a/archive/2023/results/ufdt-parallel.html b/archive/2023/results/ufdt-parallel.html
index 42860319..9ed8bfad 100644
--- a/archive/2023/results/ufdt-parallel.html
+++ b/archive/2023/results/ufdt-parallel.html
[UFDT (Parallel Track): markup-only hunks over the page header, navigation, and result tables (iProver, Vampire).]
diff --git a/archive/2023/results/ufdt-proof-exhibition.html b/archive/2023/results/ufdt-proof-exhibition.html
index 73094f86..4ff9bbf8 100644
--- a/archive/2023/results/ufdt-proof-exhibition.html
+++ b/archive/2023/results/ufdt-proof-exhibition.html
[UFDT (Proof Exhibition Track): markup-only hunks over the page header, navigation, and result tables (cvc5, cvc5-lfsc, SMTInterpol).]
diff --git a/archive/2023/results/ufdt-single-query.html b/archive/2023/results/ufdt-single-query.html
index b3307c92..617f06cd 100644
--- a/archive/2023/results/ufdt-single-query.html
+++ b/archive/2023/results/ufdt-single-query.html
UFDT (Single Query Track): single-line edits to the page header, navigation, results description, the performance summary (cvc5 leads all five categories), and the result rows for cvc5, 2022-cvc5n, iProver, iProver Fixedn, SMTInterpol, and Vampire; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdt-unsat-core.html b/archive/2023/results/ufdt-unsat-core.html
index 55b92411..b8cc610a 100644
--- a/archive/2023/results/ufdt-unsat-core.html
+++ b/archive/2023/results/ufdt-unsat-core.html
UFDT (Unsat Core Track): single-line edits to the page header, navigation, results description, the performance summary (cvc5 leads both sequential and parallel), and the result rows for 2022-Vampiren, cvc5, 2020-CVC4-ucn, SMTInterpol, and Vampire; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtlia-cloud.html b/archive/2023/results/ufdtlia-cloud.html
index 914094a2..1012eb5b 100644
--- a/archive/2023/results/ufdtlia-cloud.html
+++ b/archive/2023/results/ufdtlia-cloud.html
UFDTLIA (Cloud Track): single-line edits to the page header, navigation, results description, and the result rows for cvc5 and Vampire; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtlia-parallel.html b/archive/2023/results/ufdtlia-parallel.html
index ed1f0a64..7823101b 100644
--- a/archive/2023/results/ufdtlia-parallel.html
+++ b/archive/2023/results/ufdtlia-parallel.html
UFDTLIA (Parallel Track): single-line edits to the page header, navigation, results description, and the result rows for Vampire and iProver; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtlia-proof-exhibition.html b/archive/2023/results/ufdtlia-proof-exhibition.html
index c093cfaa..e872a3e1 100644
--- a/archive/2023/results/ufdtlia-proof-exhibition.html
+++ b/archive/2023/results/ufdtlia-proof-exhibition.html
UFDTLIA (Proof Exhibition Track): single-line edits to the page header, navigation, results description, and the result rows for cvc5-lfsc, cvc5, and SMTInterpol; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtlia-single-query.html b/archive/2023/results/ufdtlia-single-query.html
index 2c8d57ca..816df8d3 100644
--- a/archive/2023/results/ufdtlia-single-query.html
+++ b/archive/2023/results/ufdtlia-single-query.html
UFDTLIA (Single Query Track): single-line edits to the page header, navigation, results description, the performance summary (cvc5 leads sequential, parallel, and UNSAT; Vampire leads 24s; no SAT winner is listed), and the result rows for cvc5, 2022-cvc5n, Vampire, iProver Fixedn, iProver, and SMTInterpol; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtlia-unsat-core.html b/archive/2023/results/ufdtlia-unsat-core.html
index d8d9812e..5e98aa68 100644
--- a/archive/2023/results/ufdtlia-unsat-core.html
+++ b/archive/2023/results/ufdtlia-unsat-core.html
UFDTLIA (Unsat Core Track): single-line edits to the page header, navigation, results description, the performance summary (Vampire leads both sequential and parallel), and the result rows for Vampire, 2021-cvc5-ucn, cvc5, and SMTInterpol; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtlira-cloud.html b/archive/2023/results/ufdtlira-cloud.html
index 7262273f..fc6d853b 100644
--- a/archive/2023/results/ufdtlira-cloud.html
+++ b/archive/2023/results/ufdtlira-cloud.html
UFDTLIRA (Cloud Track): single-line edits to the page header, navigation, results description, and the result rows for cvc5 and Vampire; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtlira-parallel.html b/archive/2023/results/ufdtlira-parallel.html
index 9961faf0..7f67ac3f 100644
--- a/archive/2023/results/ufdtlira-parallel.html
+++ b/archive/2023/results/ufdtlira-parallel.html
UFDTLIRA (Parallel Track): single-line edits to the page header, navigation, results description, and the result rows for iProver and Vampire; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtlira-proof-exhibition.html b/archive/2023/results/ufdtlira-proof-exhibition.html
index b458415a..5ad9a8d8 100644
--- a/archive/2023/results/ufdtlira-proof-exhibition.html
+++ b/archive/2023/results/ufdtlira-proof-exhibition.html
UFDTLIRA (Proof Exhibition Track): single-line edits to the page header, navigation, results description, and the result rows for cvc5-lfsc, cvc5, and SMTInterpol; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtlira-single-query.html b/archive/2023/results/ufdtlira-single-query.html
index d6a56f2d..21f0d0ed 100644
--- a/archive/2023/results/ufdtlira-single-query.html
+++ b/archive/2023/results/ufdtlira-single-query.html
UFDTLIRA (Single Query Track): single-line edits to the page header, navigation, results description, the performance summary (cvc5 leads all five categories), and the result rows for cvc5, 2022-cvc5n, SMTInterpol, Vampire, iProver Fixedn, and iProver; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtlira-unsat-core.html b/archive/2023/results/ufdtlira-unsat-core.html
index b9706271..9a8bdb74 100644
--- a/archive/2023/results/ufdtlira-unsat-core.html
+++ b/archive/2023/results/ufdtlira-unsat-core.html
UFDTLIRA (Unsat Core Track): single-line edits to the page header, navigation, results description, the performance summary (cvc5 leads both sequential and parallel), and the result rows for 2021-cvc5-ucn, cvc5, SMTInterpol, and Vampire; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtnia-incremental.html b/archive/2023/results/ufdtnia-incremental.html
index eb3a9d8e..b3e9eec6 100644
--- a/archive/2023/results/ufdtnia-incremental.html
+++ b/archive/2023/results/ufdtnia-incremental.html
UFDTNIA (Incremental Track): single-line edits to the page header, navigation, results description, the performance summary (cvc5 leads parallel), and the result rows for cvc5 and SMTInterpol; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtnia-proof-exhibition.html b/archive/2023/results/ufdtnia-proof-exhibition.html
index b68c4549..f439e741 100644
--- a/archive/2023/results/ufdtnia-proof-exhibition.html
+++ b/archive/2023/results/ufdtnia-proof-exhibition.html
UFDTNIA (Proof Exhibition Track): single-line edits to the page header, navigation, results description, and the result rows for cvc5-lfsc and cvc5; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtnia-single-query.html b/archive/2023/results/ufdtnia-single-query.html
index fac29f8c..803c075a 100644
--- a/archive/2023/results/ufdtnia-single-query.html
+++ b/archive/2023/results/ufdtnia-single-query.html
UFDTNIA (Single Query Track): single-line edits to the page header, navigation, results description, the performance summary (Vampire leads sequential, parallel, UNSAT, and 24s; no SAT winner is listed), and the result rows for Vampire, cvc5, 2022-cvc5n, iProver, and iProver Fixedn; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtnia-unsat-core.html b/archive/2023/results/ufdtnia-unsat-core.html
index e75e538e..8a980850 100644
--- a/archive/2023/results/ufdtnia-unsat-core.html
+++ b/archive/2023/results/ufdtnia-unsat-core.html
UFDTNIA (Unsat Core Track): single-line edits to the page header, navigation, results description, the performance summary (Vampire leads both sequential and parallel), and the result rows for Vampire, 2020-CVC4-ucn, and cvc5; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtnira-cloud.html b/archive/2023/results/ufdtnira-cloud.html
index bd83ab5a..59a361dc 100644
--- a/archive/2023/results/ufdtnira-cloud.html
+++ b/archive/2023/results/ufdtnira-cloud.html
UFDTNIRA (Cloud Track): single-line edits to the page header, navigation, results description, and the result rows for Vampire and cvc5; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtnira-parallel.html b/archive/2023/results/ufdtnira-parallel.html
index 172ea2df..14ec3b13 100644
--- a/archive/2023/results/ufdtnira-parallel.html
+++ b/archive/2023/results/ufdtnira-parallel.html
UFDTNIRA (Parallel Track): single-line edits to the page header, navigation, results description, and the result rows for Vampire and iProver; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtnira-proof-exhibition.html b/archive/2023/results/ufdtnira-proof-exhibition.html
index 399e45bd..5e5bcba8 100644
--- a/archive/2023/results/ufdtnira-proof-exhibition.html
+++ b/archive/2023/results/ufdtnira-proof-exhibition.html
UFDTNIRA (Proof Exhibition Track): single-line edits to the page header, navigation, results description, and the result rows for cvc5-lfsc and cvc5; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtnira-single-query.html b/archive/2023/results/ufdtnira-single-query.html
index 2f8d6656..3c4b974c 100644
--- a/archive/2023/results/ufdtnira-single-query.html
+++ b/archive/2023/results/ufdtnira-single-query.html
UFDTNIRA (Single Query Track): single-line edits to the page header, navigation, results description, the performance summary (cvc5 leads sequential, parallel, UNSAT, and 24s; no SAT winner is listed), and the result rows for 2022-cvc5n, cvc5, Vampire, iProver, and iProver Fixedn; the closing hunk removes one line.
diff --git a/archive/2023/results/ufdtnira-unsat-core.html b/archive/2023/results/ufdtnira-unsat-core.html
index 3ef8d72d..320a10c1 100644
--- a/archive/2023/results/ufdtnira-unsat-core.html
+++ b/archive/2023/results/ufdtnira-unsat-core.html
UFDTNIRA (Unsat Core Track): single-line edits to the page header, navigation, results description, the performance summary (cvc5 leads both sequential and parallel), and the result rows for 2020-CVC4-ucn, cvc5, and Vampire; the closing hunk removes one line.
diff --git a/archive/2023/results/uffpdtnira-proof-exhibition.html b/archive/2023/results/uffpdtnira-proof-exhibition.html
index 45e1e120..857a3b3e 100644
--- a/archive/2023/results/uffpdtnira-proof-exhibition.html
+++ b/archive/2023/results/uffpdtnira-proof-exhibition.html
UFFPDTNIRA (Proof Exhibition Track): single-line edits to the page header, navigation, results description, and the result rows for cvc5-lfsc and cvc5; the closing hunk removes one line.
diff --git a/archive/2023/results/ufidl-parallel.html b/archive/2023/results/ufidl-parallel.html
index 23d283ce..ef8461e6 100644
--- a/archive/2023/results/ufidl-parallel.html
+++ b/archive/2023/results/ufidl-parallel.html
UFIDL (Parallel Track): single-line edits to the page header, navigation, results description, and the result rows for Vampire and iProver; the closing hunk removes one line.
diff --git a/archive/2023/results/ufidl-proof-exhibition.html b/archive/2023/results/ufidl-proof-exhibition.html
index 837c5109..64593a0c 100644
--- a/archive/2023/results/ufidl-proof-exhibition.html
+++ b/archive/2023/results/ufidl-proof-exhibition.html
UFIDL (Proof Exhibition Track): single-line edits to the page header, navigation, results description, and the result rows for cvc5-lfsc, cvc5, and SMTInterpol; the closing hunk removes one line.
diff --git a/archive/2023/results/ufidl-single-query.html b/archive/2023/results/ufidl-single-query.html
index bcb7811e..90318894 100644
--- a/archive/2023/results/ufidl-single-query.html
+++ b/archive/2023/results/ufidl-single-query.html
UFIDL (Single Query Track): single-line edits to the page header, navigation, results description, the performance summary (cvc5 leads sequential, parallel, UNSAT, and 24s; SMTInterpol leads SAT), and the result rows for cvc5, 2022-cvc5n, SMTInterpol, Vampire, iProver, iProver Fixedn, and UltimateEliminator+MathSAT; the closing hunk removes one line.
diff --git a/archive/2023/results/ufidl-unsat-core.html b/archive/2023/results/ufidl-unsat-core.html
index 87fbbdd4..e5ba84fa 100644
--- a/archive/2023/results/ufidl-unsat-core.html
+++ b/archive/2023/results/ufidl-unsat-core.html
UFIDL (Unsat Core Track): single-line edits to the page header, navigation, results description, the performance summary (cvc5 leads both sequential and parallel), and the result rows for 2021-cvc5-ucn, cvc5, SMTInterpol, Vampire, and UltimateEliminator+MathSAT; the closing hunk removes one line.
diff --git a/archive/2023/results/uflia-cloud.html b/archive/2023/results/uflia-cloud.html
index a5506427..7dbb10be 100644
--- a/archive/2023/results/uflia-cloud.html
+++ b/archive/2023/results/uflia-cloud.html
UFLIA (Cloud Track): single-line edits to the page header, navigation, results description, and the result rows for Vampire and cvc5; the closing hunk removes one line.
diff --git a/archive/2023/results/uflia-parallel.html b/archive/2023/results/uflia-parallel.html
index 2e22becc..5eefb003 100644
--- a/archive/2023/results/uflia-parallel.html
+++ b/archive/2023/results/uflia-parallel.html
UFLIA (Parallel Track): single-line edits to the page header, navigation, results description, and the result rows for Vampire and iProver; the closing hunk removes one line.
diff --git a/archive/2023/results/uflia-proof-exhibition.html b/archive/2023/results/uflia-proof-exhibition.html
index 4d36afe2..d97b191a 100644
--- a/archive/2023/results/uflia-proof-exhibition.html
+++ b/archive/2023/results/uflia-proof-exhibition.html
UFLIA (Proof Exhibition Track): single-line edits to the page header, navigation, results description, and the result rows for cvc5-lfsc, SMTInterpol, and cvc5; the closing hunk removes one line.
diff --git a/archive/2023/results/uflia-single-query.html b/archive/2023/results/uflia-single-query.html
index 3978f048..7b4191d8 100644
--- a/archive/2023/results/uflia-single-query.html
+++ b/archive/2023/results/uflia-single-query.html
UFLIA (Single Query Track): single-line edits to the page header, navigation, results description, the performance summary (cvc5 leads sequential, parallel, UNSAT, and 24s; SMTInterpol leads SAT), and the result rows for cvc5, 2022-cvc5n, Vampire, iProver, iProver Fixedn, SMTInterpol, and UltimateEliminator+MathSAT; the closing hunk removes one line.
    - + - diff --git a/archive/2023/results/uflia-unsat-core.html b/archive/2023/results/uflia-unsat-core.html index 9e9670b6..351e9498 100644 --- a/archive/2023/results/uflia-unsat-core.html +++ b/archive/2023/results/uflia-unsat-core.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    UFLIA (Unsat Core Track)

    Competition results for the UFLIA - + logic - + in the Unsat Core Track.

    @@ -104,8 +104,8 @@

    UFLIA (Unsat Core Track)

    Sequential PerformanceParallel Performance cvc5cvc5 - - + + @@ -126,7 +126,7 @@

    UFLIA (Unsat Core Track)

    - + 2021-cvc5-ucn 0 933724 @@ -137,7 +137,7 @@

    UFLIA (Unsat Core Track)

    - + cvc5 0 861381 @@ -148,7 +148,7 @@

    UFLIA (Unsat Core Track)

    - + SMTInterpol 0 592956 @@ -159,7 +159,7 @@

    UFLIA (Unsat Core Track)

    - + Vampire 0 510459 @@ -170,7 +170,7 @@

    UFLIA (Unsat Core Track)

    - + UltimateEliminator+MathSAT 1 1414 @@ -192,7 +192,7 @@

    UFLIA (Unsat Core Track)

    - + 2021-cvc5-ucn 0 93372413764.87413738.55187 @@ -201,7 +201,7 @@

    UFLIA (Unsat Core Track)

    - + cvc5 0 86138119026.38418941.356184 @@ -210,7 +210,7 @@

    UFLIA (Unsat Core Track)

    - + SMTInterpol 0 59403163488.54652006.5071411 @@ -219,7 +219,7 @@

    UFLIA (Unsat Core Track)

    - + Vampire 0 51115622755.955926.942293 @@ -228,7 +228,7 @@

    UFLIA (Unsat Core Track)

    - + UltimateEliminator+MathSAT 1 1414819.49739.4514 @@ -252,7 +252,6 @@

    UFLIA (Unsat Core Track)

    - + - diff --git a/archive/2023/results/uflra-incremental.html b/archive/2023/results/uflra-incremental.html index c73dac79..b667cf84 100644 --- a/archive/2023/results/uflra-incremental.html +++ b/archive/2023/results/uflra-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    UFLRA (Incremental Track)

    Competition results for the UFLRA - + logic - + in the Incremental Track.

    @@ -104,8 +104,8 @@

    UFLRA (Incremental Track)

    Parallel Performance cvc5 - - + + @@ -124,7 +124,7 @@

    UFLRA (Incremental Track)

    - + 2021-z3n 0 355479145890.98145946.5920890058 @@ -133,7 +133,7 @@

    UFLRA (Incremental Track)

    - + cvc5 0 12142441745.6241790.4644295517 @@ -142,7 +142,7 @@

    UFLRA (Incremental Track)

    - + SMTInterpol 0 115741237060.81226499.74448638215 @@ -151,7 +151,7 @@

    UFLRA (Incremental Track)

    - + UltimateEliminator+MathSAT 0 00.00.05643790 @@ -175,7 +175,6 @@

    UFLRA (Incremental Track)

    - + - diff --git a/archive/2023/results/uflra-proof-exhibition.html b/archive/2023/results/uflra-proof-exhibition.html index 70fd0f84..f243ef49 100644 --- a/archive/2023/results/uflra-proof-exhibition.html +++ b/archive/2023/results/uflra-proof-exhibition.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    UFLRA (Proof Exhibition Track)

    Competition results for the UFLRA - + logic - + in the Proof Exhibition Track.

    @@ -119,7 +119,7 @@

    UFLRA (Proof Exhibition Track)

    - + cvc5 0 10 @@ -130,7 +130,7 @@

    UFLRA (Proof Exhibition Track)

    - + cvc5-lfsc 0 10 @@ -141,7 +141,7 @@

    UFLRA (Proof Exhibition Track)

    - + SMTInterpol 0 10 @@ -163,7 +163,7 @@

    UFLRA (Proof Exhibition Track)

    - + cvc5 0 100.4330.42200 @@ -172,7 +172,7 @@

    UFLRA (Proof Exhibition Track)

    - + cvc5-lfsc 0 100.5690.55800 @@ -181,7 +181,7 @@

    UFLRA (Proof Exhibition Track)

    - + SMTInterpol 0 1013.2038.48400 @@ -205,7 +205,6 @@

    UFLRA (Proof Exhibition Track)

    - + - diff --git a/archive/2023/results/uflra-single-query.html b/archive/2023/results/uflra-single-query.html index c4d91e7a..e3b386b1 100644 --- a/archive/2023/results/uflra-single-query.html +++ b/archive/2023/results/uflra-single-query.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    UFLRA (Single Query Track)

    Competition results for the UFLRA - + logic - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    UFLRA (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) cvc5cvc5— - - + + cvc5 - - + + cvc5 - + @@ -131,7 +131,7 @@

    UFLRA (Single Query Track)

    - + cvc5 0 2 @@ -142,7 +142,7 @@

    UFLRA (Single Query Track)

    - + 2022-cvc5n 0 2 @@ -153,7 +153,7 @@

    UFLRA (Single Query Track)

    - + SMTInterpol 0 2 @@ -164,7 +164,7 @@

    UFLRA (Single Query Track)

    - + Vampire 0 2 @@ -175,7 +175,7 @@

    UFLRA (Single Query Track)

    - + iProver 0 2 @@ -186,7 +186,7 @@

    UFLRA (Single Query Track)

    - + iProver Fixedn 0 2 @@ -197,7 +197,7 @@

    UFLRA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 0 @@ -219,7 +219,7 @@

    UFLRA (Single Query Track)

    - + cvc5 0 20.0410.03920252 @@ -228,7 +228,7 @@

    UFLRA (Single Query Track)

    - + 2022-cvc5n 0 20.0460.04420253 @@ -237,7 +237,7 @@

    UFLRA (Single Query Track)

    - + SMTInterpol 0 21.2890.88820254 @@ -246,7 +246,7 @@

    UFLRA (Single Query Track)

    - + Vampire 0 25.5331.78720255 @@ -255,7 +255,7 @@

    UFLRA (Single Query Track)

    - + iProver 0 27.7972.30220254 @@ -264,7 +264,7 @@

    UFLRA (Single Query Track)

    - + iProver Fixedn 0 28.4642.80520254 @@ -273,7 +273,7 @@

    UFLRA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.000070 @@ -293,7 +293,7 @@

    UFLRA (Single Query Track)

    - + 2022-cvc5n 0 00.00.0000523 @@ -302,7 +302,7 @@

    UFLRA (Single Query Track)

    - + Vampire 0 00.00.0000525 @@ -311,7 +311,7 @@

    UFLRA (Single Query Track)

    - + cvc5 0 00.00.0000522 @@ -320,7 +320,7 @@

    UFLRA (Single Query Track)

    - + SMTInterpol 0 00.00.0000524 @@ -329,7 +329,7 @@

    UFLRA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.0000520 @@ -338,7 +338,7 @@

    UFLRA (Single Query Track)

    - + iProver 0 00.00.0000524 @@ -347,7 +347,7 @@

    UFLRA (Single Query Track)

    - + iProver Fixedn 0 00.00.0000524 @@ -367,7 +367,7 @@

    UFLRA (Single Query Track)

    - + cvc5 0 20.0410.039202052 @@ -376,7 +376,7 @@

    UFLRA (Single Query Track)

    - + 2022-cvc5n 0 20.0460.044202053 @@ -385,7 +385,7 @@

    UFLRA (Single Query Track)

    - + SMTInterpol 0 21.2890.888202054 @@ -394,7 +394,7 @@

    UFLRA (Single Query Track)

    - + Vampire 0 25.5331.787202055 @@ -403,7 +403,7 @@

    UFLRA (Single Query Track)

    - + iProver 0 27.7972.302202054 @@ -412,7 +412,7 @@

    UFLRA (Single Query Track)

    - + iProver Fixedn 0 28.4642.805202054 @@ -421,7 +421,7 @@

    UFLRA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.0000250 @@ -441,7 +441,7 @@

    UFLRA (Single Query Track)

    - + cvc5 0 20.0410.03920255 @@ -450,7 +450,7 @@

    UFLRA (Single Query Track)

    - + 2022-cvc5n 0 20.0460.04420255 @@ -459,7 +459,7 @@

    UFLRA (Single Query Track)

    - + SMTInterpol 0 21.2890.88820254 @@ -468,7 +468,7 @@

    UFLRA (Single Query Track)

    - + Vampire 0 25.5331.78720255 @@ -477,7 +477,7 @@

    UFLRA (Single Query Track)

    - + iProver 0 27.7972.30220254 @@ -486,7 +486,7 @@

    UFLRA (Single Query Track)

    - + iProver Fixedn 0 28.4642.80520254 @@ -495,7 +495,7 @@

    UFLRA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 00.00.000070 @@ -519,7 +519,6 @@

    UFLRA (Single Query Track)

    - + - diff --git a/archive/2023/results/uflra-unsat-core.html b/archive/2023/results/uflra-unsat-core.html index e1beddd6..0a2b2a13 100644 --- a/archive/2023/results/uflra-unsat-core.html +++ b/archive/2023/results/uflra-unsat-core.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    UFLRA (Unsat Core Track)

    Competition results for the UFLRA - + logic - + in the Unsat Core Track.

    @@ -104,8 +104,8 @@

    UFLRA (Unsat Core Track)

    Sequential PerformanceParallel Performance cvc5cvc5 - - + + @@ -126,7 +126,7 @@

    UFLRA (Unsat Core Track)

    - + 2021-cvc5-ucn 0 16 @@ -137,7 +137,7 @@

    UFLRA (Unsat Core Track)

    - + cvc5 0 16 @@ -148,7 +148,7 @@

    UFLRA (Unsat Core Track)

    - + SMTInterpol 0 16 @@ -159,7 +159,7 @@

    UFLRA (Unsat Core Track)

    - + UltimateEliminator+MathSAT 0 0 @@ -170,7 +170,7 @@

    UFLRA (Unsat Core Track)

    - + Vampire 0 0 @@ -192,7 +192,7 @@

    UFLRA (Unsat Core Track)

    - + 2021-cvc5-ucn 0 160.20.1960 @@ -201,7 +201,7 @@

    UFLRA (Unsat Core Track)

    - + cvc5 0 160.2140.210 @@ -210,7 +210,7 @@

    UFLRA (Unsat Core Track)

    - + SMTInterpol 0 167.0434.7230 @@ -219,7 +219,7 @@

    UFLRA (Unsat Core Track)

    - + UltimateEliminator+MathSAT 0 00.00.00 @@ -228,7 +228,7 @@

    UFLRA (Unsat Core Track)

    - + Vampire 0 07.0082.6710 @@ -252,7 +252,6 @@

    UFLRA (Unsat Core Track)

    - + - diff --git a/archive/2023/results/ufnia-cloud.html b/archive/2023/results/ufnia-cloud.html index 9cf59b9b..3c147609 100644 --- a/archive/2023/results/ufnia-cloud.html +++ b/archive/2023/results/ufnia-cloud.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    UFNIA (Cloud Track)

    Competition results for the UFNIA - + logic - + in the Cloud Track.

    @@ -117,7 +117,7 @@

    UFNIA (Cloud Track)

    - + cvc5 0 2414.182202360 @@ -126,7 +126,7 @@

    UFNIA (Cloud Track)

    - + Vampire 0 1555.922101370 @@ -146,7 +146,7 @@

    UFNIA (Cloud Track)

    - + Vampire 0 00.00000380 @@ -155,7 +155,7 @@

    UFNIA (Cloud Track)

    - + cvc5 0 00.00000380 @@ -175,7 +175,7 @@

    UFNIA (Cloud Track)

    - + cvc5 0 2414.1822020360 @@ -184,7 +184,7 @@

    UFNIA (Cloud Track)

    - + Vampire 0 1555.9221011360 @@ -204,7 +204,7 @@

    UFNIA (Cloud Track)

    - + Vampire 0 00.00003838 @@ -213,7 +213,7 @@

    UFNIA (Cloud Track)

    - + cvc5 0 00.00003835 @@ -237,7 +237,6 @@

    UFNIA (Cloud Track)

    - + - diff --git a/archive/2023/results/ufnia-incremental.html b/archive/2023/results/ufnia-incremental.html index d913a55d..3710f192 100644 --- a/archive/2023/results/ufnia-incremental.html +++ b/archive/2023/results/ufnia-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    UFNIA (Incremental Track)

    Competition results for the UFNIA - + logic - + in the Incremental Track.

    @@ -104,8 +104,8 @@

    UFNIA (Incremental Track)

    Parallel Performance cvc5 - - + + @@ -124,7 +124,7 @@

    UFNIA (Incremental Track)

    - + 2022-z3-4.8.17n 0 84259610167.22610383.06243319988 @@ -133,7 +133,7 @@

    UFNIA (Incremental Track)

    - + cvc5 0 3112761618.6361780.45296451888 @@ -142,7 +142,7 @@

    UFNIA (Incremental Track)

    - + SMTInterpol 0 1449898746.368091.663130801041 @@ -151,7 +151,7 @@

    UFNIA (Incremental Track)

    - + UltimateEliminator+MathSAT 0 00.00.03275780 @@ -175,7 +175,6 @@

    UFNIA (Incremental Track)

    - + - diff --git a/archive/2023/results/ufnia-parallel.html b/archive/2023/results/ufnia-parallel.html index f951cf3f..fa17f703 100644 --- a/archive/2023/results/ufnia-parallel.html +++ b/archive/2023/results/ufnia-parallel.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    UFNIA (Parallel Track)

    Competition results for the UFNIA - + logic - + in the Parallel Track.

    @@ -117,7 +117,7 @@

    UFNIA (Parallel Track)

    - + Vampire 0 1116.634101360 @@ -126,7 +126,7 @@

    UFNIA (Parallel Track)

    - + iProver 0 00.0000370 @@ -146,7 +146,7 @@

    UFNIA (Parallel Track)

    - + Vampire 0 00.00000370 @@ -155,7 +155,7 @@

    UFNIA (Parallel Track)

    - + iProver 0 00.00000370 @@ -175,7 +175,7 @@

    UFNIA (Parallel Track)

    - + Vampire 0 1116.6341010360 @@ -184,7 +184,7 @@

    UFNIA (Parallel Track)

    - + iProver 0 00.00001360 @@ -204,7 +204,7 @@

    UFNIA (Parallel Track)

    - + Vampire 0 00.00003737 @@ -213,7 +213,7 @@

    UFNIA (Parallel Track)

    - + iProver 0 00.00003737 @@ -237,7 +237,6 @@

    UFNIA (Parallel Track)

    - + - diff --git a/archive/2023/results/ufnia-proof-exhibition.html b/archive/2023/results/ufnia-proof-exhibition.html index 87146ca4..cbda45cd 100644 --- a/archive/2023/results/ufnia-proof-exhibition.html +++ b/archive/2023/results/ufnia-proof-exhibition.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    UFNIA (Proof Exhibition Track)

    Competition results for the UFNIA - + logic - + in the Proof Exhibition Track.

    @@ -119,7 +119,7 @@

    UFNIA (Proof Exhibition Track)

    - + cvc5-lfsc 0 2309 @@ -130,7 +130,7 @@

    UFNIA (Proof Exhibition Track)

    - + cvc5 0 1244 @@ -152,7 +152,7 @@

    UFNIA (Proof Exhibition Track)

    - + cvc5-lfsc 0 230946088.15645775.872518518 @@ -161,7 +161,7 @@

    UFNIA (Proof Exhibition Track)

    - + cvc5 0 124436470.67136019.91615831360 @@ -185,7 +185,6 @@

    UFNIA (Proof Exhibition Track)

    - + - diff --git a/archive/2023/results/ufnia-single-query.html b/archive/2023/results/ufnia-single-query.html index 0a5d580a..4983cc19 100644 --- a/archive/2023/results/ufnia-single-query.html +++ b/archive/2023/results/ufnia-single-query.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    UFNIA (Single Query Track)

    Competition results for the UFNIA - + logic - + in the Single Query Track.

    @@ -104,13 +104,13 @@

    UFNIA (Single Query Track)

    Sequential PerformanceParallel PerformanceSAT Performance (parallel)UNSAT Performance (parallel)24s Performance (parallel) cvc5cvc5cvc5 - - + + cvc5 - - + + cvc5 - + @@ -131,7 +131,7 @@

    UFNIA (Single Query Track)

    - + cvc5 0 3623 @@ -142,7 +142,7 @@

    UFNIA (Single Query Track)

    - + 2022-cvc5n 0 3588 @@ -153,7 +153,7 @@

    UFNIA (Single Query Track)

    - + Vampire 0 2436 @@ -164,7 +164,7 @@

    UFNIA (Single Query Track)

    - + iProver 0 1486 @@ -175,7 +175,7 @@

    UFNIA (Single Query Track)

    - + iProver Fixedn 0 1465 @@ -186,7 +186,7 @@

    UFNIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 608 @@ -208,7 +208,7 @@

    UFNIA (Single Query Track)

    - + cvc5 0 3623122297.938125475.9113623742288126562647 @@ -217,7 +217,7 @@

    UFNIA (Single Query Track)

    - + 2022-cvc5n 0 3588137415.366139922.333588741284726912684 @@ -226,7 +226,7 @@

    UFNIA (Single Query Track)

    - + Vampire 0 2738944263.147237815.66927380273835413539 @@ -235,7 +235,7 @@

    UFNIA (Single Query Track)

    - + iProver 0 1690500889.071128099.71716900169045894589 @@ -244,7 +244,7 @@

    UFNIA (Single Query Track)

    - + iProver Fixedn 0 1667451840.86115459.94616670166746124612 @@ -253,7 +253,7 @@

    UFNIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 60811637.28810430.4546084261825671529 @@ -273,7 +273,7 @@

    UFNIA (Single Query Track)

    - + cvc5 0 74223882.11923928.2027427420655312647 @@ -282,7 +282,7 @@

    UFNIA (Single Query Track)

    - + 2022-cvc5n 0 74129139.89929266.6397417410755312684 @@ -291,7 +291,7 @@

    UFNIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 42610386.5669536.19342642603225531529 @@ -300,7 +300,7 @@

    UFNIA (Single Query Track)

    - + Vampire 0 00.00.000074855313539 @@ -309,7 +309,7 @@

    UFNIA (Single Query Track)

    - + iProver 0 00.00.000074855314589 @@ -318,7 +318,7 @@

    UFNIA (Single Query Track)

    - + iProver Fixedn 0 00.00.000074855314612 @@ -338,7 +338,7 @@

    UFNIA (Single Query Track)

    - + cvc5 0 288198415.818101547.70828810288128931092647 @@ -347,7 +347,7 @@

    UFNIA (Single Query Track)

    - + 2022-cvc5n 0 2847108275.467110655.69128470284732331092684 @@ -356,7 +356,7 @@

    UFNIA (Single Query Track)

    - + Vampire 0 2738944263.147237815.66927380273843231093539 @@ -365,7 +365,7 @@

    UFNIA (Single Query Track)

    - + iProver 0 1690500889.071128099.717169001690148031094589 @@ -374,7 +374,7 @@

    UFNIA (Single Query Track)

    - + iProver Fixedn 0 1667451840.86115459.946166701667150331094612 @@ -383,7 +383,7 @@

    UFNIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 1821250.723894.261182018229883109529 @@ -403,7 +403,7 @@

    UFNIA (Single Query Track)

    - + cvc5 0 29882717.0172679.7952988688230032913286 @@ -412,7 +412,7 @@

    UFNIA (Single Query Track)

    - + 2022-cvc5n 0 29223164.9863108.2192922676224633573352 @@ -421,7 +421,7 @@

    UFNIA (Single Query Track)

    - + Vampire 0 155217569.8914597.1715520155247274727 @@ -430,7 +430,7 @@

    UFNIA (Single Query Track)

    - + iProver 0 124115918.7184582.61312410124150385038 @@ -439,7 +439,7 @@

    UFNIA (Single Query Track)

    - + iProver Fixedn 0 123515235.9674434.0912350123550445044 @@ -448,7 +448,7 @@

    UFNIA (Single Query Track)

    - + UltimateEliminator+MathSAT 0 5694118.0923003.985693891805710630 @@ -472,7 +472,6 @@

    UFNIA (Single Query Track)

    - + - diff --git a/archive/2023/results/ufnia-unsat-core.html b/archive/2023/results/ufnia-unsat-core.html index b70ccf9f..f6f82501 100644 --- a/archive/2023/results/ufnia-unsat-core.html +++ b/archive/2023/results/ufnia-unsat-core.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    UFNIA (Unsat Core Track)

    Competition results for the UFNIA - + logic - + in the Unsat Core Track.

    @@ -104,8 +104,8 @@

    UFNIA (Unsat Core Track)

    Sequential PerformanceParallel Performance cvc5cvc5 - - + + @@ -126,7 +126,7 @@

    UFNIA (Unsat Core Track)

    - + cvc5 0 96027 @@ -137,7 +137,7 @@

    UFNIA (Unsat Core Track)

    - + 2020-CVC4-ucn 0 95015 @@ -148,7 +148,7 @@

    UFNIA (Unsat Core Track)

    - + Vampire 0 45315 @@ -159,7 +159,7 @@

    UFNIA (Unsat Core Track)

    - + UltimateEliminator+MathSAT 1 223 @@ -181,7 +181,7 @@

    UFNIA (Unsat Core Track)

    - + cvc5 0 9602786300.95386262.153630 @@ -190,7 +190,7 @@

    UFNIA (Unsat Core Track)

    - + 2020-CVC4-ucn 0 9501546605.53746476.282744 @@ -199,7 +199,7 @@

    UFNIA (Unsat Core Track)

    - + Vampire 0 458646382.9541644.119426 @@ -208,7 +208,7 @@

    UFNIA (Unsat Core Track)

    - + UltimateEliminator+MathSAT 1 2234333.6353333.695212 @@ -232,7 +232,6 @@

    UFNIA (Unsat Core Track)

    - + - diff --git a/archive/2023/results/ufnra-incremental.html b/archive/2023/results/ufnra-incremental.html index 4042da01..4446fa34 100644 --- a/archive/2023/results/ufnra-incremental.html +++ b/archive/2023/results/ufnra-incremental.html @@ -35,7 +35,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -56,7 +56,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -82,9 +82,9 @@

    UFNRA (Incremental Track)

    Competition results for the UFNRA - + logic - + in the Incremental Track.

    @@ -104,8 +104,8 @@

    UFNRA (Incremental Track)

    Parallel Performance cvc5 - - + + @@ -124,7 +124,7 @@

    UFNRA (Incremental Track)

    - + 2022-z3-4.8.17n 0 2091.752.0900 @@ -133,7 +133,7 @@

    UFNRA (Incremental Track)

    - + cvc5 0 50.510.582041 @@ -142,7 +142,7 @@

    UFNRA (Incremental Track)

    - + SMTInterpol 0 20.00.92074 @@ -151,7 +151,7 @@

    UFNRA (Incremental Track)

    - + UltimateEliminator+MathSAT 0 00.00.02090 @@ -175,7 +175,6 @@

    UFNRA (Incremental Track)

    - + - diff --git a/archive/2023/slides.html b/archive/2023/slides.html index 9ef5286d..36db8f33 100644 --- a/archive/2023/slides.html +++ b/archive/2023/slides.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -75,7 +75,6 @@

    SMT workshop presentation

    - + - diff --git a/archive/2023/specs.html b/archive/2023/specs.html index 6c90634d..5b8fb817 100644 --- a/archive/2023/specs.html +++ b/archive/2023/specs.html @@ -34,7 +34,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -55,7 +55,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -96,7 +96,6 @@

    Machine Specifications

    - + - diff --git a/archive/2023/stats.html b/archive/2023/stats.html index 31636c16..733516da 100644 --- a/archive/2023/stats.html +++ b/archive/2023/stats.html @@ -48,7 +48,7 @@

    SMT-COMP

    - +

    The International Satisfiability Modulo Theories (SMT) Competition.

    @@ -69,7 +69,7 @@

    SMT-COMP 2023 Rules
    Benchmarks
    Specs
    Model Validation Track
    Proof Exhibition Track
    Parallel & Cloud Tracks
    Participants
    Results
    Statistics
    Comparisons
    Slides

    - + @@ -154,7 +154,6 @@

diff --git a/archive/assets/js/scale.fix.js b/archive/assets/js/scale.fix.js
index 08716c00..7193b1ae 100644
--- a/archive/assets/js/scale.fix.js
+++ b/archive/assets/js/scale.fix.js
@@ -17,4 +17,4 @@ fixScale = function(doc) {
     doc[addEvent](type, fix, true);
   }
-};
\ No newline at end of file
+};
diff --git a/archive/js/jquery.tablesorter.js b/archive/js/jquery.tablesorter.js
index 3ffdf251..5d9fda61 100644
--- a/archive/js/jquery.tablesorter.js
+++ b/archive/js/jquery.tablesorter.js
[the hunks strip trailing whitespace throughout the TableSorter 2.0.5b plugin's header comment and code; the - and + sides are textually identical apart from whitespace, so there is no functional change]
diff --git a/codecov.yaml b/codecov.yaml
new file mode 100644
index 00000000..058cfb76
--- /dev/null
+++ b/codecov.yaml
@@ -0,0 +1,9 @@
+coverage:
+  range: 70..100
+  round: down
+  precision: 1
+  status:
+    project:
+      default:
+        target: 90%
+        threshold: 0.5%
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 00000000..7c6acda7
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,1114 @@
+# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
+
[the generated "files = [...]" sha256 hash arrays that follow each package's python-versions line are elided from the entries below]
+[[package]]
+name = "annotated-types"
+version = "0.6.0"
+description = "Reusable constraint types to use with typing.Annotated"
+optional = false
+python-versions = ">=3.8"
+
+[[package]]
+name = "beautifulsoup4"
+version = "4.12.3"
+description = "Screen-scraping library"
+optional = false
+python-versions = ">=3.6.0"
+
+[package.dependencies]
+soupsieve = ">1.2"
+
+[package.extras]
+cchardet = ["cchardet"]
+chardet = ["chardet"]
+charset-normalizer = ["charset-normalizer"]
+html5lib = ["html5lib"]
+lxml = ["lxml"]
+
+[[package]]
+name = "benchexec"
+version = "3.20"
+description = "A Framework for Reliable Benchmarking and Resource Measurement."
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+PyYAML = ">=3.12"
+
+[package.extras]
+dev = ["lxml", "nose (>=1.0)"]
+systemd = ["pystemd (>=0.7.0)"]
+
+[[package]]
+name = "bs4"
+version = "0.0.2"
+description = "Dummy package for Beautiful Soup (beautifulsoup4)"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+beautifulsoup4 = "*"
+
+[[package]]
+name = "cachetools"
+version = "5.3.2"
+description = "Extensible memoizing collections and decorators"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
+name = "certifi"
+version = "2024.2.2"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "cfgv"
+version = "3.4.0"
+description = "Validate configuration and produce human readable error messages."
+optional = false
+python-versions = ">=3.8"
+
+[[package]]
+name = "chardet"
+version = "5.2.0"
+description = "Universal encoding detector for Python 3"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
+name = "charset-normalizer"
+version = "3.3.2"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false
+python-versions = ">=3.7.0"
+
+[[package]]
+name = "click"
+version = "8.1.7"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+
+[[package]]
+name = "coverage"
+version = "7.4.1"
+description = "Code coverage measurement for Python"
+optional = false
+python-versions = ">=3.8"
+
+[package.extras]
+toml = ["tomli"]
+
+[[package]]
+name = "deptry"
+version = "0.12.0"
+description = "A command line utility to check for unused, missing and transitive dependencies in a Python project."
+optional = false
+python-versions = ">=3.8,<4.0"
+
+[package.dependencies]
+chardet = ">=4.0.0"
+click = ">=8.0.0,<9.0.0"
+colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""}
+pathspec = ">=0.9.0"
+
+[[package]]
+name = "distlib"
+version = "0.3.8"
+description = "Distribution utilities"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "dnspython"
+version = "2.5.0"
+description = "DNS toolkit"
+optional = false
+python-versions = ">=3.8"
+
+[package.extras]
+dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=5.0.3)", "mypy (>=1.0.1)", "pylint (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "sphinx (>=7.0.0)", "twine (>=4.0.0)", "wheel (>=0.41.0)"]
+dnssec = ["cryptography (>=41)"]
+doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.25.1)"]
+doq = ["aioquic (>=0.9.20)"]
+idna = ["idna (>=2.1)"]
+trio = ["trio (>=0.14)"]
+wmi = ["wmi (>=1.5.1)"]
+
+[[package]]
+name = "email-validator"
+version = "2.1.0.post1"
+description = "A robust email address syntax and deliverability validation library."
+optional = false
+python-versions = ">=3.8"
+
+[package.dependencies]
+dnspython = ">=2.0.0"
+idna = ">=2.0.0"
+
+[[package]]
+name = "filelock"
+version = "3.13.1"
+description = "A platform independent file lock."
+optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "gitdb" +version = "4.0.11" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.41" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"}, + {file = "GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "sumtypes"] + +[[package]] +name = "identify" +version = "2.5.34" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.34-py2.py3-none-any.whl", hash = "sha256:a4316013779e433d08b96e5eabb7f641e6c7942e4ab5d4c509ebd2e7a8994aed"}, + {file = "identify-2.5.34.tar.gz", hash = "sha256:ee17bc9d499899bc9eaec1ac7bf2dc9eedd480db9d88b96d123d3b64a9d34f5d"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy" +version = "1.8.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = 
"mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "option" +version = "2.1.0" +description = "Rust like Option and Result types in Python" +optional = false +python-versions = ">=3.7,<4" +files = [ + {file = "option-2.1.0-py3-none-any.whl", hash = "sha256:21ccd9a437dbee0341700367efb68e82065fd7a7dba09f8c3263cf2dc1a2b0e0"}, + {file = "option-2.1.0.tar.gz", hash = "sha256:9fe95a231e54724d2382a5124b55cd84b82339edf1d4e88d6977cedffbfeadf1"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "3.6.1" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.6.1-py2.py3-none-any.whl", hash = "sha256:9fe989afcf095d2c4796ce7c553cf28d4d4a9b9346de3cda079bcf40748454a4"}, + {file = "pre_commit-3.6.1.tar.gz", hash = "sha256:c90961d8aa706f75d60935aba09469a6b0bcb8345f127c3fbee4bdc5f114cf4b"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "pydantic" +version = "2.6.1" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.16.2" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.16.2" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = 
"sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyproject-api" +version = "1.6.1" +description = "API to interact with the python pyproject.toml based projects" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyproject_api-1.6.1-py3-none-any.whl", hash = "sha256:4c0116d60476b0786c88692cf4e325a9814965e2469c5998b830bba16b183675"}, + {file = "pyproject_api-1.6.1.tar.gz", hash = "sha256:1817dc018adc0d1ff9ca1ed8c60e1623d5aaca40814b953af14a9cf9a5cae538"}, +] + +[package.dependencies] +packaging = ">=23.1" + +[package.extras] +docs = ["furo (>=2023.8.19)", "sphinx (<7.2)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "setuptools (>=68.1.2)", "wheel (>=0.41.2)"] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "python-gitlab" +version = "4.4.0" +description = "A python wrapper for the GitLab API" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "python-gitlab-4.4.0.tar.gz", hash = "sha256:1d117bf7b433ae8255e5d74e72c660978f50ee85eb62248c9fb52ef43c3e3814"}, + {file = "python_gitlab-4.4.0-py3-none-any.whl", hash = "sha256:cdad39d016f59664cdaad0f878f194c79cb4357630776caa9a92c1da25c8d986"}, +] + +[package.dependencies] +requests = ">=2.25.0" +requests-toolbelt = ">=0.10.1" + +[package.extras] +autocompletion = ["argcomplete (>=1.10.0,<3)"] +yaml = ["PyYaml (>=6.0.1)"] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + 
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "rich" +version = "13.7.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, + {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "setuptools" +version = "69.1.0" +description = "Easily download, build, install, upgrade, and 
uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, + {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + +[[package]] +name = "smmap" +version = "5.0.1" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.7" +files = [ + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, +] + +[[package]] +name = "soupsieve" +version = "2.5" +description = "A modern CSS selector implementation for Beautiful Soup." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, +] + +[[package]] +name = "tox" +version = "4.12.1" +description = "tox is a generic virtualenv management and test command line tool" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tox-4.12.1-py3-none-any.whl", hash = "sha256:c07ea797880a44f3c4f200ad88ad92b446b83079d4ccef89585df64cc574375c"}, + {file = "tox-4.12.1.tar.gz", hash = "sha256:61aafbeff1bd8a5af84e54ef6e8402f53c6a6066d0782336171ddfbf5362122e"}, +] + +[package.dependencies] +cachetools = ">=5.3.2" +chardet = ">=5.2" +colorama = ">=0.4.6" +filelock = ">=3.13.1" +packaging = ">=23.2" +platformdirs = ">=4.1" +pluggy = ">=1.3" +pyproject-api = ">=1.6.1" +virtualenv = ">=20.25" + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.25.2)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] +testing = ["build[virtualenv] (>=1.0.3)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=8.0.2)", "distlib (>=0.3.8)", "flaky (>=3.7)", "hatch-vcs (>=0.4)", "hatchling (>=1.21)", "psutil (>=5.9.7)", "pytest (>=7.4.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-xdist (>=3.5)", "re-assert (>=1.1)", "time-machine (>=2.13)", "wheel (>=0.42)"] + +[[package]] +name = "typer" +version = "0.9.0" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +optional = false +python-versions = ">=3.6" +files = [ + {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"}, + {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, +] + +[package.dependencies] +click = ">=7.1.1,<9.0.0" +colorama = {version = ">=0.4.3,<0.5.0", optional = true, markers = "extra == \"all\""} +rich = {version = ">=10.11.0,<14.0.0", optional = true, markers = "extra == \"all\""} +shellingham = {version = ">=1.3.0,<2.0.0", optional = true, markers = "extra == \"all\""} +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] +doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] + +[[package]] +name = "typing-extensions" +version = "4.9.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, +] + 
+[[package]] +name = "urllib3" +version = "2.2.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "virtualenv" +version = "20.25.0" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, + {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "wget" +version = "3.2" +description = "pure python download utility" +optional = false +python-versions = "*" +files = [ + {file = "wget-3.2.zip", hash = "sha256:35e630eca2aa50ce998b9b1a127bb26b30dfee573702782aa982f875e3f16061"}, +] + +[[package]] +name = "yattag" +version = "1.15.2" +description = "Generate HTML or XML in a pythonic way. Pure python alternative to web template engines.Can fill HTML forms with default values and error messages." 
+optional = false +python-versions = "*" +files = [ + {file = "yattag-1.15.2.tar.gz", hash = "sha256:aad9f540bd22dc503e5b5506cc47856facf081aa71fd35f727371b63e1e402bf"}, +] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.11,<4.0" +content-hash = "f7c80c6fec3d3dc99297fe910707e761fbab4df2847a987a0a7771a6514bad20" diff --git a/poetry.toml b/poetry.toml new file mode 100644 index 00000000..ab1033bd --- /dev/null +++ b/poetry.toml @@ -0,0 +1,2 @@ +[virtualenvs] +in-project = true diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..569b851c --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,123 @@ +[tool.poetry] +name = "smtcomp" +version = "0.0.1" +description = "Tools used for the organization of the SMT competition" +authors = ["SMTCOMP organizers "] +repository = "https://github.com/smtcomp/smtcomp.github.io" +documentation = "https://smtcomp.github.io/smtcomp/" +readme = "README.md" +packages = [ + {include = "smtcomp"} +] + +[tool.poetry.scripts] +smtcomp = "smtcomp.main:app" + +[tool.poetry.dependencies] +python = ">=3.11,<4.0" +typer = {extras = ["all"], version = "^0.9.0"} +rich = "^13.7.0" +pydantic = "^2.5.0" +email-validator = "^2.1.0" +python-gitlab = "*" +gitpython = "*" +yattag = "*" +wget = "*" +option = "*" +requests = "*" +bs4 = "*" +benchexec = "*" + +[tool.poetry.group.dev.dependencies] +pytest = "^7.2.0" +pytest-cov = "^4.0.0" +deptry = "^0.12.0" +mypy = "^1.5.1" +pre-commit = "^3.4.0" +tox = "^4.11.1" + + + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.black] +line-length = 120 +target-version = ['py310'] +preview = true +exclude = 'archive|web' + +[tool.mypy] +files = ["smtcomp"] +disallow_untyped_defs = "True" +disallow_any_unimported = "True" +no_implicit_optional = "True" +check_untyped_defs = "True" +warn_return_any = "True" +warn_unused_ignores = "True" +show_error_codes = "True" +ignore_missing_imports = "True" + +[tool.pytest.ini_options] +testpaths = ["tests"] + +[tool.ruff] +target-version = "py37" +line-length = 120 +fix = true +lint.select = [ + # flake8-2020 + "YTT", + # flake8-bandit + "S", + # flake8-bugbear + "B", + # flake8-builtins + "A", + # flake8-comprehensions + "C4", + # flake8-debugger + "T10", + # flake8-simplify + "SIM", + # isort + "I", + # mccabe + "C90", + # pycodestyle + "E", "W", + # pyflakes + "F", + # pygrep-hooks + "PGH", + # pyupgrade + "UP", + # ruff + "RUF", + # tryceratops + "TRY", +] + +lint.ignore = [ + # LineTooLong + "E501", + # DoNotAssignLambda + "E731", + # Shadow builtin value + "A001", +] + +[tool.coverage.report] +skip_empty = true + +[tool.coverage.run] +branch = true +source = ["smtcomp"] + + +[tool.ruff.lint.per-file-ignores] +"tests/*" = ["S101"] + +[tool.deptry.per_rule_ignores] +DEP002 = ["email-validator"] diff --git a/smtcomp/__init__.py b/smtcomp/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/smtcomp/archive.py b/smtcomp/archive.py new file mode 100644 index 00000000..d0d75f2c --- /dev/null +++ b/smtcomp/archive.py @@ -0,0 +1,103 @@ +from pathlib import Path +from smtcomp.unpack import extract_all_with_executable_permission +import zipfile +from typing import Optional + +import wget +from rich import print +from rich.progress import Progress + +from smtcomp import defs + + +def cache_dir() -> str: + return "download" + + +def unpack_dir() -> str: + return "unpack" + + +def archive_cache_dir(archive: defs.Archive, dst: Path) -> Path: + return dst.joinpath(cache_dir(), archive.uniq_id()) + + +def 
archive_unpack_dir(archive: defs.Archive, dst: Path) -> Path: + return dst.joinpath(unpack_dir(), archive.uniq_id()) + + +def is_archive_cache_present(archive: defs.Archive, dst: Path) -> Optional[Path]: + d = archive_cache_dir(archive, dst) + d.mkdir(parents=True, exist_ok=True) + return next(d.iterdir(), None) + + +def is_unpack_present(archive: defs.Archive, dst: Path) -> bool: + d = archive_unpack_dir(archive, dst) + d.mkdir(parents=True, exist_ok=True) + return any(True for _ in d.iterdir()) + + +def find_command(command: defs.Command, archive: defs.Archive, dst: Path) -> Path: + d = archive_unpack_dir(archive, dst) + if not d.exists(): + raise Exception("Archive not unpacked", archive) + path = d.joinpath(command.binary) + if path.exists(): + return path + possibilities = list(d.rglob(command.binary)) + if len(possibilities) == 0: + raise Exception("Command not found in the archive", command, archive) + if len(possibilities) >= 2: + raise Exception("Too many candidates for the command", command, archive, possibilities) + return possibilities[0] + + +def download(archive: defs.Archive, dst: Path) -> None: + dst.joinpath(cache_dir()).mkdir(parents=True, exist_ok=True) + x = is_archive_cache_present(archive, dst) + if x: + print("archive in cache:", x) + else: + with Progress() as progress: + task1 = progress.add_task("[red]Downloading...") + + # bar_progress is invoked automatically by wget to report download progress + def bar_progress(current: float, total: float, width: int) -> None: + progress.update(task1, completed=current, total=total) + + y = archive_cache_dir(archive, dst) + y.mkdir(parents=True, exist_ok=True) + wget.download(str(archive.url), str(y), bar=bar_progress) + print("Download done") + + +import subprocess + + +# For testing with last year's final solvers we need some patches: +# on StarExec things seem to be executed in a particular way +def patch(udir: Path) -> None: + if udir.name == "e0873e12a04fcfcf1bf2e449f04101c2812d4c533fb634beb6e92e8eaa6d78f7": + # For COLIBRI + subprocess.run(["chmod", "-R", "u+x", udir.joinpath("COLIBRI 2023_05_10")]) + subprocess.run(["mv", udir.joinpath("COLIBRI 2023_05_10"), udir.joinpath("COLIBRI_2023_05_10")]) + + +def unpack(archive: defs.Archive, dst: Path) -> None: + dst.joinpath(unpack_dir()).mkdir(parents=True, exist_ok=True) + archive_file = is_archive_cache_present(archive, dst) + if not archive_file: + raise ValueError("Cannot unpack: archive not downloaded") + + dst.mkdir(parents=True, exist_ok=True) + udir = archive_unpack_dir(archive, dst) + if is_unpack_present(archive, dst): + print("archive already unpacked:", udir) + else: + print("unpack archive", archive_file) + extract_all_with_executable_permission(archive_file, udir) + patch(udir) + if not is_unpack_present(archive, dst): + print("[red]Empty archive", archive_file) + exit(1) diff --git a/smtcomp/benchexec.py b/smtcomp/benchexec.py new file mode 100644 index 00000000..dc53fd3c --- /dev/null +++ b/smtcomp/benchexec.py @@ -0,0 +1,98 @@ +from pathlib import Path +from typing import List, cast, Dict, Optional + +from yattag import Doc + +from smtcomp import defs +from smtcomp.archive import find_command +from pydantic import BaseModel + +import shlex + + +class CmdTask(BaseModel): + name: str + options: List[str] + includesfiles: List[str] + + +def generate_xml(timelimit_s: int, memlimit_M: int, cpuCores: int, cmdtasks: List[CmdTask], file: Path) -> None: + doc, tag, text = Doc().tagtext() + + doc.asis('<?xml version="1.0"?>') + # DOCTYPE reconstructed after HTML stripping; the exact BenchExec DTD version is an assumption + doc.asis( + '<!DOCTYPE benchmark PUBLIC "+//IDN sosy-lab.org//DTD BenchExec benchmark 1.9//EN" "https://www.sosy-lab.org/benchexec/benchmark-1.9.dtd">' + ) + with tag( + "benchmark", + tool="smtcomp.tool",
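+ # per-run resource limits for BenchExec; the hard limit below grants a 30s grace period over the soft time limit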
timelimit=f"{timelimit_s}s", + hardlimit=f"{timelimit_s+30}s", + memlimit=f"{memlimit_M} MB", + cpuCores=f"{cpuCores}", + displayname="SC", + ): + for cmdtask in cmdtasks: + for includesfile in cmdtask.includesfiles: + with tag("rundefinition", name=f"{cmdtask.name},{includesfile}"): + for option in cmdtask.options: + with tag("option"): + text(option) + with tag("tasks", name="task"): + with tag("includesfile"): + text(includesfile) + + file.write_text(doc.getvalue()) + + +def cmdtask_for_submission(s: defs.Submission, cachedir: Path) -> List[CmdTask]: + res: List[CmdTask] = [] + i = -1 + for p in s.participations.root: + command = cast(defs.Command, p.command if p.command else s.command) + archive = cast(defs.Archive, p.archive if p.archive else s.archive) + for track, divisions in p.get().items(): + i = i + 1 + match track: + case defs.Track.Incremental: + suffix = "_inc" + mode = "trace" + case defs.Track.ModelValidation: + suffix = "_model" + mode = "direct" + case defs.Track.UnsatCore: + suffix = "" + mode = "direct" + case defs.Track.ProofExhibition: + suffix = "" + mode = "direct" + case defs.Track.SingleQuery: + suffix = "" + mode = "direct" + case defs.Track.Cloud | defs.Track.Parallel: + continue + tasks: list[str] = [] + for _, logics in divisions.items(): + tasks.extend([logic + suffix for logic in logics]) + if tasks: + executable_path = find_command(command, archive, cachedir) + executable = str(executable_path.resolve()) + if command.compa_starexec: + assert command.arguments == [] + dirname = str(executable_path.parent.resolve()) + options = [ + "bash", + "-c", + f'FILE=$(realpath $1); (cd {shlex.quote(dirname)}; exec {shlex.quote(executable)} "$FILE")', + "compa_starexec", + ] + else: + options = [executable] + command.arguments + cmdtask = CmdTask( + name=f"{s.name},{i},{track}", + options=[mode] + options, + includesfiles=tasks, + ) + res.append(cmdtask) + return res diff --git a/smtcomp/benchmarks.py b/smtcomp/benchmarks.py new file mode 100644 index 00000000..e198108b --- /dev/null +++ b/smtcomp/benchmarks.py @@ -0,0 +1,212 @@ +from __future__ import annotations + +import subprocess +import time +from operator import length_hint +from pathlib import Path +from types import TracebackType +from typing import Callable, Iterable, Sequence, TypeVar + +import git +import gitlab +from rich import console, progress +from rich.progress import Progress, _TrackThread + +ProgressType = TypeVar("ProgressType") + + +class GitRemoteProgress(git.RemoteProgress): + """ + From https://stackoverflow.com/questions/51045540/python-progress-bar-for-git-clone + """ + + OP_CODES = [ + "BEGIN", + "CHECKING_OUT", + "COMPRESSING", + "COUNTING", + "END", + "FINDING_SOURCES", + "RECEIVING", + "RESOLVING", + "WRITING", + ] + OP_CODE_MAP = {getattr(git.RemoteProgress, _op_code): _op_code for _op_code in OP_CODES} + + def __init__(self) -> None: + super().__init__() + self.progressbar = progress.Progress( + progress.SpinnerColumn(), + # *progress.Progress.get_default_columns(), + progress.TextColumn("[progress.description]{task.description}"), + progress.BarColumn(), + progress.TextColumn("[progress.percentage]{task.percentage:>3.0f}%"), + "eta", + progress.TimeRemainingColumn(), + progress.TextColumn("{task.fields[message]}"), + console=console.Console(), + transient=False, + auto_refresh=False, + ) + self.progressbar.start() + self.active_task: progress.TaskID | None = None + + def __del__(self) -> None: + # logger.info("Destroying bar...") + self.progressbar.stop() + + @classmethod + def 
get_curr_op(cls, op_code: int) -> str: + """Get OP name from OP code.""" + # Remove BEGIN- and END-flag and get op name + op_code_masked = op_code & cls.OP_MASK + return cls.OP_CODE_MAP.get(op_code_masked, "?").title() + + def update( + self, + op_code: int, + cur_count: str | float, + max_count: str | float | None = None, + message: str | None = "", + ) -> None: + # Start new bar on each BEGIN-flag + if op_code & self.BEGIN: + self.curr_op = self.get_curr_op(op_code) + # logger.info("Next: %s", self.curr_op) + self.active_task = self.progressbar.add_task( + description=self.curr_op, + total=float(max_count) if max_count else None, + message=message, + ) + + # Compare against None explicitly: the first TaskID is 0, which is falsy + if self.active_task is not None: + # Always set if the protocol is respected + self.progressbar.update( + task_id=self.active_task, + completed=float(cur_count), + message=message, + ) + + # End progress monitoring on each END-flag + if op_code & self.END: + # logger.info("Done: %s", self.curr_op) + self.progressbar.update( + task_id=self.active_task, + message=f"[bright_black]{message}", + ) + + def __enter__(self) -> GitRemoteProgress: + return self + + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: + self.progressbar.__exit__(exc_type, exc_val, exc_tb) + + +def track( + progress: Progress, + sequence: Iterable[ProgressType] | Sequence[ProgressType], + total: float | None = None, + task_id: progress.TaskID | None = None, + description: str = "Working...", + update_period: float = 0.1, +) -> Iterable[tuple[(Callable[[str], None]), ProgressType]]: + """Track progress by iterating over a sequence. + + Args: + progress (Progress): Progress bar to render the task with. + sequence (Sequence[ProgressType]): A sequence of values you want to iterate over and track progress. + total: (float, optional): Total number of steps. Default is len(sequence). + task_id: (TaskID): Task to track. Default is new task. + description: (str, optional): Description of task, if new task is created. + update_period (float, optional): Minimum time (in seconds) between calls to update(). Defaults to 0.1. + + Yields: + tuple[Callable[[str], None], ProgressType]: For each value taken from the provided sequence, a callback that updates the task's message column together with the value itself.
+ """ + if total is None: + total = float(length_hint(sequence)) or None + + if task_id is None: + task_id = progress.add_task(description, total=total, message="") + else: + progress.update(task_id, total=total, message="") + + def update_message(message: str) -> None: + progress.update(task_id, total=total, message=message) + + if progress.live.auto_refresh: + with _TrackThread(progress, task_id, update_period) as track_thread: + for value in sequence: + yield (update_message, value) + track_thread.completed += 1 + else: + advance = progress.advance + refresh = progress.refresh + for value in sequence: + yield (update_message, value) + advance(task_id, 1) + refresh() + + +gl = gitlab.Gitlab("https://clc-gitlab.cs.uiowa.edu:2443") + + +class P: + def __init__(self, i: int) -> None: + self.name = "Project" + str(i) + self.http_url_to_repo = self.name + + def __str__(self) -> str: + return self.name + + +class MyNumbers: + def __iter__(self) -> MyNumbers: + self.a = 1 + return self + + def __next__(self) -> P: + x = self.a + time.sleep(0.5) + self.a += 1 + if x < 5: + return P(x) + else: + raise StopIteration + + +def clone_group(name: str, dstdir: Path, dryrun: bool) -> None: + """clone the group named name in directory dir""" + with GitRemoteProgress() as gitprogress: + progress = gitprogress.progressbar + progress.console.print("Start downloading benchmarks:", name) + # group = gl.groups.get(name) + # projects = list(progress.track(group.projects.list(iterator=True), description="List logics...")) + projects = [p for _, p in track(progress, iter(MyNumbers()), description="List logics...", total=6)] + dstdir.mkdir(exist_ok=True, parents=True) + for update_message, project in track(progress, projects, description="Downloading..."): + update_message(project.name) + if project.name in ["QF_BV_legacy", "Sage2_legacy"]: + progress.console.print(project.name, "skipped") + continue + path = dstdir.joinpath(project.name) + if path.exists(): + update_message(project.name + " update") + if dryrun: + time.sleep(0.5) + else: + subprocess.run(["git", "-C", path, "fetch", "--depth=1"]) + subprocess.run(["git", "-C", path, "reset", "--hard", "FETCH_HEAD"]) + else: + update_message(project.name + " clone") + if dryrun: + time.sleep(0.5) + else: + git.Repo.clone_from( + url=project.http_url_to_repo, + to_path=path, + depth=1, + progress=gitprogress.update, + ) diff --git a/smtcomp/convert_csv.py b/smtcomp/convert_csv.py new file mode 100644 index 00000000..35014461 --- /dev/null +++ b/smtcomp/convert_csv.py @@ -0,0 +1,177 @@ +import csv as csv +import re +from enum import Enum +from pathlib import Path +from typing import Dict, List, Optional, Tuple + +import requests +from bs4 import BeautifulSoup +from pydantic.networks import HttpUrl +from option import Option, Some +from rich.progress import track + +import smtcomp.defs as defs + + +class CsvColumn(str, Enum): + name = "Solver Name" + homepage = "Solver homepage" + system_description = "System description URL" + title_system_description = "System description name" + starexec_id = "Solver ID" + wrapped_tool = "Wrapper Tool" + derived_tool = "Derived Tool" + track_single_query = "Single Query Regex" + track_incremental = "Incremental Regex" + track_unsat_core = "Unsat Core Regex" + track_model_validation = "Model Validation Regex" + track_proof = "Proof Exhibition Regex" + track_parallel = "Parallel Regex" + track_cloud = "Cloud Regex" + contact = "Contact" + contributors = "Team Members" + + +# cache for tests/solvers_divisions_final.csv +cache = { + 
44384: ["default"], + 44702: ["default"], + 44703: ["default"], + 44765: ["default"], + 44767: ["def"], + 44672: ["default"], + 44713: ["default"], + 44741: ["default"], + 44742: ["default"], + 44715: ["default"], + 44759: ["default", "incremental", "proof"], + 44756: ["default"], + 44484: ["vampire_smtcomp"], + 44479: ["default"], + 44751: ["default"], + 44755: ["default"], + 39111: ["default"], + 41385: ["default"], + 44761: ["default", "incremental"], + 44707: ["default"], + 44716: ["default"], + 44737: ["proof", "sq", "mv", "uc"], + 44738: ["default"], + 44736: ["proof"], + 44790: ["default", "incremental"], + 44678: ["default"], + 44764: ["default"], + 44768: ["iprover_SMT"], + 44760: ["default"], +} + + +def configurations_on_starexec(id: int) -> list[str]: + if id in cache: + return cache[id] + + URL = "https://www.starexec.org/starexec/secure/details/solver.jsp?id=" + str(id) + page = requests.get(URL) + soup = BeautifulSoup(page.content, "html.parser") + + cache[id] = [i.a.text.strip() for i in soup.find_all(name="td", id="configItem")] + return cache[id] + + +def convert_row(row: Dict[str, str], dstdir: Path) -> defs.Submission: + # print(row[CsvColumn.name]) + + def archive_of_solver_id(solver_id: int) -> defs.Archive: + return defs.Archive( + url=HttpUrl(f"https://www.starexec.org/starexec/secure/download?type=solver&id={solver_id}"), + h=None, + ) + + solver_ids = row[CsvColumn.starexec_id].split(";") + + def find_archive_id(track_id: Option[str]) -> Option[int]: + r = ( + re.compile(" *([0-9]+)+\\(" + track_id.unwrap() + "\\) *") + if track_id.is_some + else re.compile(" *([0-9]+) *") + ) + for solver_id in solver_ids: + g = r.fullmatch(solver_id) + if g: + return Some(int(g.group(1))) + return Option.NONE() + + def has_configuration(id: int, track_id: str) -> bool: + return track_id in configurations_on_starexec(id) + + def mk_cmd(conf: str) -> defs.Command: + return defs.Command(binary="bin/starexec_run_" + conf, compa_starexec=True) + + def find_archive(track_id: Option[str]) -> Tuple[Option[defs.Archive], Option[defs.Command]]: + main_id = find_archive_id(track_id) + archive = main_id.map(archive_of_solver_id) + + id = main_id if main_id else find_archive_id(Option.NONE()) + track_id2 = track_id.unwrap_or("default") + if id: + if has_configuration(id.unwrap(), track_id2): + command = Some(mk_cmd(track_id2)) + elif has_configuration(id.unwrap(), "default"): + command = Some(mk_cmd("default")) + elif track_id2 == "default" and len(configurations_on_starexec(id.unwrap())) == 1: + # Seems that if there is only one configuration it is accepted + # as the default + command = Some(mk_cmd(configurations_on_starexec(id.unwrap())[0])) + else: + command = Option.NONE() + + else: + command = Option.NONE() + return archive, command + + archive, command = find_archive(Option.NONE()) + contributors = [ + defs.Contributor(name=name) for line in row[CsvColumn.contributors].splitlines() for name in line.split(",") + ] + participations: List[defs.Participation] = [] + + def add_track(col: CsvColumn, track: defs.Track, shortcut: str) -> None: + if row[col] != "" and row[col] != "-": + archive, command = find_archive(Some(shortcut)) + participations.append( + defs.Participation( + tracks=[track], + logics=defs.Logics.from_regexp(row[col].strip()), + archive=archive.unwrap_or(None), + command=command.unwrap_or(None), + ) + ) + + add_track(CsvColumn.track_single_query, defs.Track.SingleQuery, "sq") + add_track(CsvColumn.track_unsat_core, defs.Track.UnsatCore, "uc") + 
add_track(CsvColumn.track_incremental, defs.Track.Incremental, "inc") + add_track(CsvColumn.track_model_validation, defs.Track.ModelValidation, "mv") + add_track(CsvColumn.track_proof, defs.Track.ProofExhibition, "proof") + add_track(CsvColumn.track_parallel, defs.Track.Parallel, "par") + add_track(CsvColumn.track_cloud, defs.Track.Cloud, "cloud") + return defs.Submission( + name=row[CsvColumn.name], + contributors=contributors, + contacts=[defs.NameEmail(name="", email=row[CsvColumn.contact])], + archive=archive.unwrap_or(None), + website=HttpUrl(row[CsvColumn.homepage]), + system_description=HttpUrl(row[CsvColumn.system_description]), + command=command.unwrap_or(None), + solver_type=defs.SolverType.standalone, + participations=defs.Participations(root=participations), + ) + + +def convert_csv(file: Path, dstdir: Path) -> None: + with open(file) as dcsv: + registrations = csv.DictReader(dcsv) + for row in track(list(registrations), description="Asking StarExec for prover configurations"): + if row[CsvColumn.starexec_id] != "-1": + submission = convert_row(row, dstdir) + with open(Path.joinpath(dstdir, submission.name + ".json"), "w") as f: + f.write(submission.model_dump_json()) diff --git a/smtcomp/defs.py b/smtcomp/defs.py new file mode 100644 index 00000000..1aa4f500 --- /dev/null +++ b/smtcomp/defs.py @@ -0,0 +1,1095 @@ +from __future__ import annotations + +import hashlib +import re +from enum import Enum +from pathlib import Path +from typing import Any + +from pydantic import BaseModel, Field, RootModel, model_validator +from pydantic.networks import HttpUrl, validate_email + + +class NameEmail(BaseModel): + """ + Name and valid email, given as "name <email>" + """ + + model_config = { + "json_schema_extra": { + "examples": [ + "Jane Smith <jane.smith@example.com>", + ] + } + } + + name: str + email: str + + @model_validator(mode="before") + @classmethod + def split_email(cls, data: NameEmail | str) -> Any: + if isinstance(data, str): + name, email = validate_email(data) + return {"name": name, "email": email} + return data + + def __str__(self) -> str: + return f"{self.name} <{self.email}>" + + +class Hash(BaseModel, extra="forbid"): + sha256: str | None = None + sha512: str | None = None + + @model_validator(mode="after") + def check_one_set(self) -> Hash: + if self.sha256 is None and self.sha512 is None: + raise ValueError("one hash type is required") + return self + + +class Contributor(BaseModel, extra="forbid"): + """ + Contributors in the development of the solver. If only a name is provided, + it can be given directly. 
+ """ + + model_config = { + "json_schema_extra": { + "examples": [ + "Jane Smith", + { + "name": "Jane Smith", + "website": "http://jane.smith.name", + }, + ] + } + } + + name: str + website: HttpUrl | None = None + + @model_validator(mode="before") + @classmethod + def name_is_default_field(cls, data: Any) -> Any: + if isinstance(data, str): + return {"name": data} + return data + + +class SolverType(str, Enum): + wrapped = "wrapped" + derived = "derived" + standalone = "Standalone" + + +# class RegexpTracks: + + +class Track(str, Enum): + UnsatCore = "UnsatCore" + SingleQuery = "SingleQuery" + ProofExhibition = "ProofExhibition" + ModelValidation = "ModelValidation" + Incremental = "Incremental" + Cloud = "Cloud" + Parallel = "Parallel" + + +class Division(str, Enum): + Arith = "Arith" + Bitvec = "Bitvec" + Equality = "Equality" + Equality_LinearArith = "Equality+LinearArith" + Equality_MachineArith = "Equality+MachineArith" + Equality_NonLinearArith = "Equality+NonLinearArith" + FPArith = "FPArith" + QF_ADT_BitVec = "QF_ADT+BitVec" + QF_ADT_LinArith = "QF_ADT+LinArith" + QF_Bitvec = "QF_Bitvec" + QF_Datatypes = "QF_Datatypes" + QF_Equality = "QF_Equality" + QF_Equality_Bitvec = "QF_Equality+Bitvec" + QF_Equality_Bitvec_Arith = "QF_Equality+Bitvec+Arith" + QF_Equality_LinearArith = "QF_Equality+LinearArith" + QF_Equality_NonLinearArith = "QF_Equality+NonLinearArith" + QF_FPArith = "QF_FPArith" + QF_LinearIntArith = "QF_LinearIntArith" + QF_LinearRealArith = "QF_LinearRealArith" + QF_NonLinearIntArith = "QF_NonLinearIntArith" + QF_NonLinearRealArith = "QF_NonLinearRealArith" + QF_Strings = "QF_Strings" + + +class Logic(str, Enum): + ABV = "ABV" + ABVFP = "ABVFP" + ABVFPLRA = "ABVFPLRA" + ALIA = "ALIA" + ANIA = "ANIA" + AUFBV = "AUFBV" + AUFBVDTLIA = "AUFBVDTLIA" + AUFBVDTNIA = "AUFBVDTNIA" + AUFBVDTNIRA = "AUFBVDTNIRA" + AUFBVFP = "AUFBVFP" + AUFDTLIA = "AUFDTLIA" + AUFDTLIRA = "AUFDTLIRA" + AUFDTNIRA = "AUFDTNIRA" + AUFFPDTNIRA = "AUFFPDTNIRA" + AUFLIA = "AUFLIA" + AUFLIRA = "AUFLIRA" + AUFNIA = "AUFNIA" + AUFNIRA = "AUFNIRA" + BV = "BV" + BVFP = "BVFP" + BVFPLRA = "BVFPLRA" + FP = "FP" + FPLRA = "FPLRA" + LIA = "LIA" + LRA = "LRA" + NIA = "NIA" + NRA = "NRA" + QF_ABV = "QF_ABV" + QF_ABVFP = "QF_ABVFP" + QF_ABVFPLRA = "QF_ABVFPLRA" + QF_ALIA = "QF_ALIA" + QF_ANIA = "QF_ANIA" + QF_AUFBV = "QF_AUFBV" + QF_AUFBVFP = "QF_AUFBVFP" + QF_AUFBVLIA = "QF_AUFBVLIA" + QF_AUFBVNIA = "QF_AUFBVNIA" + QF_AUFLIA = "QF_AUFLIA" + QF_AUFNIA = "QF_AUFNIA" + QF_AX = "QF_AX" + QF_BV = "QF_BV" + QF_BVFP = "QF_BVFP" + QF_BVFPLRA = "QF_BVFPLRA" + QF_DT = "QF_DT" + QF_FP = "QF_FP" + QF_FPLRA = "QF_FPLRA" + QF_IDL = "QF_IDL" + QF_LIA = "QF_LIA" + QF_LIRA = "QF_LIRA" + QF_LRA = "QF_LRA" + QF_NIA = "QF_NIA" + QF_NIRA = "QF_NIRA" + QF_NRA = "QF_NRA" + QF_RDL = "QF_RDL" + QF_S = "QF_S" + QF_SLIA = "QF_SLIA" + QF_SNIA = "QF_SNIA" + QF_UF = "QF_UF" + QF_UFBV = "QF_UFBV" + QF_UFBVDT = "QF_UFBVDT" + QF_UFBVLIA = "QF_UFBVLIA" + QF_UFDT = "QF_UFDT" + QF_UFDTLIA = "QF_UFDTLIA" + QF_UFDTLIRA = "QF_UFDTLIRA" + QF_UFDTNIA = "QF_UFDTNIA" + QF_UFFP = "QF_UFFP" + QF_UFFPDTNIRA = "QF_UFFPDTNIRA" + QF_UFIDL = "QF_UFIDL" + QF_UFLIA = "QF_UFLIA" + QF_UFLRA = "QF_UFLRA" + QF_UFNIA = "QF_UFNIA" + QF_UFNRA = "QF_UFNRA" + UF = "UF" + UFBV = "UFBV" + UFBVDT = "UFBVDT" + UFBVFP = "UFBVFP" + UFBVLIA = "UFBVLIA" + UFDT = "UFDT" + UFDTLIA = "UFDTLIA" + UFDTLIRA = "UFDTLIRA" + UFDTNIA = "UFDTNIA" + UFDTNIRA = "UFDTNIRA" + UFFPDTNIRA = "UFFPDTNIRA" + UFIDL = "UFIDL" + UFLIA = "UFLIA" + UFLRA = "UFLRA" + UFNIA = "UFNIA" + UFNRA = "UFNRA" + + 
+tracks: dict[Track, dict[Division, set[Logic]]] = { + Track.SingleQuery: { + Division.QF_Datatypes: { + Logic.QF_DT, + Logic.QF_UFDT, + }, + Division.QF_Equality: { + Logic.QF_AX, + Logic.QF_UF, + }, + Division.QF_Equality_LinearArith: { + Logic.QF_ALIA, + Logic.QF_AUFLIA, + Logic.QF_UFDTLIA, + Logic.QF_UFDTLIRA, + Logic.QF_UFIDL, + Logic.QF_UFLIA, + Logic.QF_UFLRA, + }, + Division.QF_Equality_NonLinearArith: { + Logic.QF_ANIA, + Logic.QF_AUFNIA, + Logic.QF_UFDTNIA, + Logic.QF_UFNIA, + Logic.QF_UFNRA, + }, + Division.QF_Equality_Bitvec: { + Logic.QF_ABV, + Logic.QF_AUFBV, + Logic.QF_UFBV, + Logic.QF_UFBVDT, + }, + Division.QF_LinearIntArith: { + Logic.QF_IDL, + Logic.QF_LIA, + Logic.QF_LIRA, + }, + Division.QF_LinearRealArith: { + Logic.QF_LRA, + Logic.QF_RDL, + }, + Division.QF_Bitvec: { + Logic.QF_BV, + }, + Division.QF_FPArith: { + Logic.QF_ABVFP, + Logic.QF_ABVFPLRA, + Logic.QF_AUFBVFP, + Logic.QF_BVFP, + Logic.QF_BVFPLRA, + Logic.QF_FP, + Logic.QF_FPLRA, + Logic.QF_UFFP, + Logic.QF_UFFPDTNIRA, + }, + Division.QF_NonLinearIntArith: { + Logic.QF_NIA, + Logic.QF_NIRA, + }, + Division.QF_NonLinearRealArith: { + Logic.QF_NRA, + }, + Division.QF_Strings: { + Logic.QF_S, + Logic.QF_SLIA, + Logic.QF_SNIA, + }, + Division.Equality: { + Logic.UF, + Logic.UFDT, + }, + Division.Equality_LinearArith: { + Logic.ALIA, + Logic.AUFDTLIA, + Logic.AUFDTLIRA, + Logic.AUFLIA, + Logic.AUFLIRA, + Logic.UFDTLIA, + Logic.UFDTLIRA, + Logic.UFIDL, + Logic.UFLIA, + Logic.UFLRA, + }, + Division.Equality_MachineArith: { + Logic.ABV, + Logic.ABVFP, + Logic.ABVFPLRA, + Logic.AUFBV, + Logic.AUFBVDTLIA, + Logic.AUFBVDTNIA, + Logic.AUFBVDTNIRA, + Logic.AUFBVFP, + Logic.AUFFPDTNIRA, + Logic.UFBV, + Logic.UFBVDT, + Logic.UFBVFP, + Logic.UFBVLIA, + Logic.UFFPDTNIRA, + }, + Division.Equality_NonLinearArith: { + Logic.ANIA, + Logic.AUFDTNIRA, + Logic.AUFNIA, + Logic.AUFNIRA, + Logic.UFDTNIA, + Logic.UFDTNIRA, + Logic.UFNIA, + }, + Division.Arith: { + Logic.LIA, + Logic.LRA, + Logic.NIA, + Logic.NRA, + }, + Division.Bitvec: { + Logic.BV, + }, + Division.FPArith: { + Logic.BVFP, + Logic.BVFPLRA, + Logic.FP, + Logic.FPLRA, + }, + }, + Track.Incremental: { + Division.QF_Equality: { + Logic.QF_UF, + }, + Division.QF_Equality_LinearArith: { + Logic.QF_ALIA, + Logic.QF_AUFLIA, + Logic.QF_UFLIA, + Logic.QF_UFLRA, + }, + Division.QF_Equality_NonLinearArith: { + Logic.QF_ANIA, + Logic.QF_UFNIA, + Logic.QF_UFNRA, + }, + Division.QF_Equality_Bitvec: { + Logic.QF_ABV, + Logic.QF_AUFBV, + Logic.QF_UFBV, + }, + Division.QF_Equality_Bitvec_Arith: { + Logic.QF_AUFBVLIA, + Logic.QF_AUFBVNIA, + Logic.QF_UFBVLIA, + }, + Division.QF_LinearIntArith: { + Logic.QF_LIA, + }, + Division.QF_LinearRealArith: { + Logic.QF_LRA, + }, + Division.QF_Bitvec: { + Logic.QF_BV, + }, + Division.QF_FPArith: { + Logic.QF_ABVFP, + Logic.QF_ABVFPLRA, + Logic.QF_BVFP, + Logic.QF_BVFPLRA, + Logic.QF_FP, + Logic.QF_UFFP, + }, + Division.QF_NonLinearIntArith: { + Logic.QF_NIA, + }, + Division.Equality: { + Logic.UF, + }, + Division.Equality_LinearArith: { + Logic.ALIA, + Logic.UFLRA, + }, + Division.Equality_MachineArith: { + Logic.ABVFPLRA, + }, + Division.Equality_NonLinearArith: { + Logic.ANIA, + Logic.AUFNIRA, + Logic.UFDTNIA, + Logic.UFNIA, + Logic.UFNRA, + }, + Division.Arith: { + Logic.LIA, + Logic.LRA, + }, + Division.Bitvec: { + Logic.BV, + }, + Division.FPArith: { + Logic.BVFP, + Logic.BVFPLRA, + }, + }, + Track.UnsatCore: { + Division.QF_Datatypes: { + Logic.QF_DT, + Logic.QF_UFDT, + }, + Division.QF_Equality: { + Logic.QF_AX, + Logic.QF_UF, + }, + 
Division.QF_Equality_LinearArith: { + Logic.QF_ALIA, + Logic.QF_AUFLIA, + Logic.QF_UFDTLIA, + Logic.QF_UFDTLIRA, + Logic.QF_UFIDL, + Logic.QF_UFLIA, + Logic.QF_UFLRA, + }, + Division.QF_Equality_NonLinearArith: { + Logic.QF_ANIA, + Logic.QF_AUFNIA, + Logic.QF_UFDTNIA, + Logic.QF_UFNIA, + Logic.QF_UFNRA, + }, + Division.QF_Equality_Bitvec: { + Logic.QF_ABV, + Logic.QF_AUFBV, + Logic.QF_UFBV, + Logic.QF_UFBVDT, + }, + Division.QF_LinearIntArith: { + Logic.QF_IDL, + Logic.QF_LIA, + Logic.QF_LIRA, + }, + Division.QF_LinearRealArith: { + Logic.QF_LRA, + Logic.QF_RDL, + }, + Division.QF_Bitvec: { + Logic.QF_BV, + }, + Division.QF_FPArith: { + Logic.QF_ABVFP, + Logic.QF_ABVFPLRA, + Logic.QF_AUFBVFP, + Logic.QF_BVFP, + Logic.QF_BVFPLRA, + Logic.QF_FP, + Logic.QF_FPLRA, + Logic.QF_UFFP, + Logic.QF_UFFPDTNIRA, + }, + Division.QF_NonLinearIntArith: { + Logic.QF_NIA, + Logic.QF_NIRA, + }, + Division.QF_NonLinearRealArith: { + Logic.QF_NRA, + }, + Division.QF_Strings: { + Logic.QF_S, + Logic.QF_SLIA, + Logic.QF_SNIA, + }, + Division.Equality: { + Logic.UF, + Logic.UFDT, + }, + Division.Equality_LinearArith: { + Logic.ALIA, + Logic.AUFDTLIA, + Logic.AUFDTLIRA, + Logic.AUFLIA, + Logic.AUFLIRA, + Logic.UFDTLIA, + Logic.UFDTLIRA, + Logic.UFIDL, + Logic.UFLIA, + Logic.UFLRA, + }, + Division.Equality_MachineArith: { + Logic.ABV, + Logic.ABVFP, + Logic.ABVFPLRA, + Logic.AUFBV, + Logic.AUFBVDTLIA, + Logic.AUFBVDTNIA, + Logic.AUFBVDTNIRA, + Logic.AUFBVFP, + Logic.AUFFPDTNIRA, + Logic.UFBV, + Logic.UFBVDT, + Logic.UFBVFP, + Logic.UFBVLIA, + Logic.UFFPDTNIRA, + }, + Division.Equality_NonLinearArith: { + Logic.ANIA, + Logic.AUFDTNIRA, + Logic.AUFNIA, + Logic.AUFNIRA, + Logic.UFDTNIA, + Logic.UFDTNIRA, + Logic.UFNIA, + }, + Division.Arith: { + Logic.LIA, + Logic.LRA, + Logic.NIA, + Logic.NRA, + }, + Division.Bitvec: { + Logic.BV, + }, + Division.FPArith: { + Logic.BVFP, + Logic.BVFPLRA, + Logic.FP, + Logic.FPLRA, + }, + }, + Track.ModelValidation: { + Division.QF_Datatypes: { + Logic.QF_DT, + Logic.QF_UFDT, + }, + Division.QF_Equality: { + Logic.QF_UF, + }, + Division.QF_Equality_LinearArith: { + Logic.QF_UFIDL, + Logic.QF_UFLIA, + Logic.QF_UFLRA, + }, + Division.QF_Equality_NonLinearArith: { + Logic.QF_ANIA, + Logic.QF_AUFNIA, + Logic.QF_UFDTNIA, + Logic.QF_UFNIA, + Logic.QF_UFNRA, + }, + Division.QF_Equality_Bitvec: { + Logic.QF_UFBV, + }, + Division.QF_ADT_BitVec: { + Logic.QF_ABV, + Logic.QF_AUFBV, + Logic.QF_UFBVDT, + }, + Division.QF_ADT_LinArith: { + Logic.QF_ALIA, + Logic.QF_AUFLIA, + Logic.QF_AX, + Logic.QF_UFDTLIA, + Logic.QF_UFDTLIRA, + }, + Division.QF_LinearIntArith: { + Logic.QF_IDL, + Logic.QF_LIA, + Logic.QF_LIRA, + }, + Division.QF_LinearRealArith: { + Logic.QF_LRA, + Logic.QF_RDL, + }, + Division.QF_Bitvec: { + Logic.QF_BV, + }, + Division.QF_FPArith: { + Logic.QF_ABVFP, + Logic.QF_ABVFPLRA, + Logic.QF_AUFBVFP, + Logic.QF_BVFP, + Logic.QF_BVFPLRA, + Logic.QF_FP, + Logic.QF_FPLRA, + Logic.QF_UFFP, + Logic.QF_UFFPDTNIRA, + }, + Division.QF_NonLinearIntArith: { + Logic.QF_NIA, + Logic.QF_NIRA, + }, + Division.QF_NonLinearRealArith: { + Logic.QF_NRA, + }, + }, + Track.ProofExhibition: { + Division.QF_Datatypes: { + Logic.QF_DT, + Logic.QF_UFDT, + }, + Division.QF_Equality: { + Logic.QF_AX, + Logic.QF_UF, + }, + Division.QF_Equality_LinearArith: { + Logic.QF_ALIA, + Logic.QF_AUFLIA, + Logic.QF_UFDTLIA, + Logic.QF_UFDTLIRA, + Logic.QF_UFIDL, + Logic.QF_UFLIA, + Logic.QF_UFLRA, + }, + Division.QF_Equality_NonLinearArith: { + Logic.QF_ANIA, + Logic.QF_AUFNIA, + Logic.QF_UFDTNIA, + Logic.QF_UFNIA, + 
Logic.QF_UFNRA, + }, + Division.QF_Equality_Bitvec: { + Logic.QF_ABV, + Logic.QF_AUFBV, + Logic.QF_UFBV, + Logic.QF_UFBVDT, + }, + Division.QF_LinearIntArith: { + Logic.QF_IDL, + Logic.QF_LIA, + Logic.QF_LIRA, + }, + Division.QF_LinearRealArith: { + Logic.QF_LRA, + Logic.QF_RDL, + }, + Division.QF_Bitvec: { + Logic.QF_BV, + }, + Division.QF_FPArith: { + Logic.QF_ABVFP, + Logic.QF_ABVFPLRA, + Logic.QF_AUFBVFP, + Logic.QF_BVFP, + Logic.QF_BVFPLRA, + Logic.QF_FP, + Logic.QF_FPLRA, + Logic.QF_UFFP, + Logic.QF_UFFPDTNIRA, + }, + Division.QF_NonLinearIntArith: { + Logic.QF_NIA, + Logic.QF_NIRA, + }, + Division.QF_NonLinearRealArith: { + Logic.QF_NRA, + }, + Division.QF_Strings: { + Logic.QF_S, + Logic.QF_SLIA, + Logic.QF_SNIA, + }, + Division.Equality: { + Logic.UF, + Logic.UFDT, + }, + Division.Equality_LinearArith: { + Logic.ALIA, + Logic.AUFDTLIA, + Logic.AUFDTLIRA, + Logic.AUFLIA, + Logic.AUFLIRA, + Logic.UFDTLIA, + Logic.UFDTLIRA, + Logic.UFIDL, + Logic.UFLIA, + Logic.UFLRA, + }, + Division.Equality_MachineArith: { + Logic.ABV, + Logic.ABVFP, + Logic.ABVFPLRA, + Logic.AUFBV, + Logic.AUFBVDTLIA, + Logic.AUFBVDTNIA, + Logic.AUFBVDTNIRA, + Logic.AUFBVFP, + Logic.AUFFPDTNIRA, + Logic.UFBV, + Logic.UFBVDT, + Logic.UFBVFP, + Logic.UFBVLIA, + Logic.UFFPDTNIRA, + }, + Division.Equality_NonLinearArith: { + Logic.ANIA, + Logic.AUFDTNIRA, + Logic.AUFNIA, + Logic.AUFNIRA, + Logic.UFDTNIA, + Logic.UFDTNIRA, + Logic.UFNIA, + }, + Division.Arith: { + Logic.LIA, + Logic.LRA, + Logic.NIA, + Logic.NRA, + }, + Division.Bitvec: { + Logic.BV, + }, + Division.FPArith: { + Logic.BVFP, + Logic.BVFPLRA, + Logic.FP, + Logic.FPLRA, + }, + }, + Track.Cloud: { + Division.QF_Datatypes: { + Logic.QF_DT, + Logic.QF_UFDT, + }, + Division.QF_Equality: { + Logic.QF_AX, + Logic.QF_UF, + }, + Division.QF_Equality_LinearArith: { + Logic.QF_ALIA, + Logic.QF_AUFLIA, + Logic.QF_UFDTLIA, + Logic.QF_UFDTLIRA, + Logic.QF_UFIDL, + Logic.QF_UFLIA, + Logic.QF_UFLRA, + }, + Division.QF_Equality_NonLinearArith: { + Logic.QF_ANIA, + Logic.QF_AUFNIA, + Logic.QF_UFDTNIA, + Logic.QF_UFNIA, + Logic.QF_UFNRA, + }, + Division.QF_Equality_Bitvec: { + Logic.QF_ABV, + Logic.QF_AUFBV, + Logic.QF_UFBV, + Logic.QF_UFBVDT, + }, + Division.QF_LinearIntArith: { + Logic.QF_IDL, + Logic.QF_LIA, + Logic.QF_LIRA, + }, + Division.QF_LinearRealArith: { + Logic.QF_LRA, + Logic.QF_RDL, + }, + Division.QF_Bitvec: { + Logic.QF_BV, + }, + Division.QF_FPArith: { + Logic.QF_ABVFP, + Logic.QF_ABVFPLRA, + Logic.QF_AUFBVFP, + Logic.QF_BVFP, + Logic.QF_BVFPLRA, + Logic.QF_FP, + Logic.QF_FPLRA, + Logic.QF_UFFP, + Logic.QF_UFFPDTNIRA, + }, + Division.QF_NonLinearIntArith: { + Logic.QF_NIA, + Logic.QF_NIRA, + }, + Division.QF_NonLinearRealArith: { + Logic.QF_NRA, + }, + Division.QF_Strings: { + Logic.QF_S, + Logic.QF_SLIA, + Logic.QF_SNIA, + }, + Division.Equality: { + Logic.UF, + Logic.UFDT, + }, + Division.Equality_LinearArith: { + Logic.ALIA, + Logic.AUFDTLIA, + Logic.AUFDTLIRA, + Logic.AUFLIA, + Logic.AUFLIRA, + Logic.UFDTLIA, + Logic.UFDTLIRA, + Logic.UFIDL, + Logic.UFLIA, + Logic.UFLRA, + }, + Division.Equality_MachineArith: { + Logic.ABV, + Logic.ABVFP, + Logic.ABVFPLRA, + Logic.AUFBV, + Logic.AUFBVDTLIA, + Logic.AUFBVDTNIA, + Logic.AUFBVDTNIRA, + Logic.AUFBVFP, + Logic.AUFFPDTNIRA, + Logic.UFBV, + Logic.UFBVDT, + Logic.UFBVFP, + Logic.UFBVLIA, + Logic.UFFPDTNIRA, + }, + Division.Equality_NonLinearArith: { + Logic.ANIA, + Logic.AUFDTNIRA, + Logic.AUFNIA, + Logic.AUFNIRA, + Logic.UFDTNIA, + Logic.UFDTNIRA, + Logic.UFNIA, + }, + Division.Arith: { + Logic.LIA, + 
Logic.LRA, + Logic.NIA, + Logic.NRA, + }, + Division.Bitvec: { + Logic.BV, + }, + Division.FPArith: { + Logic.BVFP, + Logic.BVFPLRA, + Logic.FP, + Logic.FPLRA, + }, + }, + Track.Parallel: { + Division.QF_Datatypes: { + Logic.QF_DT, + Logic.QF_UFDT, + }, + Division.QF_Equality: { + Logic.QF_AX, + Logic.QF_UF, + }, + Division.QF_Equality_LinearArith: { + Logic.QF_ALIA, + Logic.QF_AUFLIA, + Logic.QF_UFDTLIA, + Logic.QF_UFDTLIRA, + Logic.QF_UFIDL, + Logic.QF_UFLIA, + Logic.QF_UFLRA, + }, + Division.QF_Equality_NonLinearArith: { + Logic.QF_ANIA, + Logic.QF_AUFNIA, + Logic.QF_UFDTNIA, + Logic.QF_UFNIA, + Logic.QF_UFNRA, + }, + Division.QF_Equality_Bitvec: { + Logic.QF_ABV, + Logic.QF_AUFBV, + Logic.QF_UFBV, + Logic.QF_UFBVDT, + }, + Division.QF_LinearIntArith: { + Logic.QF_IDL, + Logic.QF_LIA, + Logic.QF_LIRA, + }, + Division.QF_LinearRealArith: { + Logic.QF_LRA, + Logic.QF_RDL, + }, + Division.QF_Bitvec: { + Logic.QF_BV, + }, + Division.QF_FPArith: { + Logic.QF_ABVFP, + Logic.QF_ABVFPLRA, + Logic.QF_AUFBVFP, + Logic.QF_BVFP, + Logic.QF_BVFPLRA, + Logic.QF_FP, + Logic.QF_FPLRA, + Logic.QF_UFFP, + Logic.QF_UFFPDTNIRA, + }, + Division.QF_NonLinearIntArith: { + Logic.QF_NIA, + Logic.QF_NIRA, + }, + Division.QF_NonLinearRealArith: { + Logic.QF_NRA, + }, + Division.QF_Strings: { + Logic.QF_S, + Logic.QF_SLIA, + Logic.QF_SNIA, + }, + Division.Equality: { + Logic.UF, + Logic.UFDT, + }, + Division.Equality_LinearArith: { + Logic.ALIA, + Logic.AUFDTLIA, + Logic.AUFDTLIRA, + Logic.AUFLIA, + Logic.AUFLIRA, + Logic.UFDTLIA, + Logic.UFDTLIRA, + Logic.UFIDL, + Logic.UFLIA, + Logic.UFLRA, + }, + Division.Equality_MachineArith: { + Logic.ABV, + Logic.ABVFP, + Logic.ABVFPLRA, + Logic.AUFBV, + Logic.AUFBVDTLIA, + Logic.AUFBVDTNIA, + Logic.AUFBVDTNIRA, + Logic.AUFBVFP, + Logic.AUFFPDTNIRA, + Logic.UFBV, + Logic.UFBVDT, + Logic.UFBVFP, + Logic.UFBVLIA, + Logic.UFFPDTNIRA, + }, + Division.Equality_NonLinearArith: { + Logic.ANIA, + Logic.AUFDTNIRA, + Logic.AUFNIA, + Logic.AUFNIRA, + Logic.UFDTNIA, + Logic.UFDTNIRA, + Logic.UFNIA, + }, + Division.Arith: { + Logic.LIA, + Logic.LRA, + Logic.NIA, + Logic.NRA, + }, + Division.Bitvec: { + Logic.BV, + }, + Division.FPArith: { + Logic.BVFP, + Logic.BVFPLRA, + Logic.FP, + Logic.FPLRA, + }, + }, +} + + +class Logics(RootModel): + root: list[Logic] + + @model_validator(mode="before") + @classmethod + def name_is_default_field(cls, data: Any) -> Any: + if isinstance(data, str): + return cls.logics_from_regexp(data) + return data + + @classmethod + def from_regexp(cls, data: str) -> Logics: + return Logics(root=cls.logics_from_regexp(data)) + + @classmethod + def logics_from_regexp(cls, data: str) -> list[Logic]: + logics = [] + r = re.compile(data) + for logic in Logic: + if r.fullmatch(logic): + logics.append(logic) + return logics + + +class Archive(BaseModel): + url: HttpUrl + h: Hash | None = None + + def uniq_id(self) -> str: + return hashlib.sha256(str(self.url).encode()).hexdigest() + + def path(self) -> Path: + return Path(self.uniq_id()) + + +class Command(BaseModel, extra="forbid"): + binary: str + arguments: list[str] = [] + compa_starexec: bool = False + + @model_validator(mode="before") + @classmethod + def split_command(cls, data: Any) -> Any: + if isinstance(data, list): + if len(data) < 1: + raise ValueError("Command must be a non empty list") + return {"binary": data[0], "arguments": data[1:]} + return data + + def uniq_id(self, name: str, archive: Archive) -> str: + data = [name, str(archive.url), self.binary, *self.arguments] + h = hashlib.sha256(" 
".join(data).encode()) + return h.hexdigest() + + +class Participation(BaseModel, extra="forbid"): + tracks: list[Track] + logics: Logics = Logics(root=[]) + divisions: list[Division] = [] + archive: Archive | None = None + command: Command | None = None + experimental: bool = False + + def get(self, d: None | dict[Track, dict[Division, set[Logic]]] = None) -> dict[Track, dict[Division, set[Logic]]]: + if d is None: + d = {} + for track in self.tracks: + divs = d.setdefault(track, {}) + for division in self.divisions: + logics: set[Logic] = divs.setdefault(division, set()) + logics.update(tracks[track][division]) + for logic in self.logics.root: + for div, logics in tracks[track].items(): + if logic in logics: + logics = divs.setdefault(div, set()) + logics.add(logic) + return d + + +class Participations(RootModel): + root: list[Participation] + + def get_divisions(self, track: Track) -> list[Division]: + """ " Return the divisions in which the solver participates""" + return [] # TODO + + def get_logics(self, track: Track) -> list[Logic]: + """ " Return the logics in which the solver participates""" + return [] # TODO + + def get(self, d: None | dict[Track, dict[Division, set[Logic]]] = None) -> dict[Track, dict[Division, set[Logic]]]: + if d is None: + d = {} + for p in self.root: + p.get(d) + return d + + +class Submission(BaseModel, extra="forbid"): + name: str + contributors: list[Contributor] = Field(min_length=1) + contacts: list[NameEmail] = Field(min_length=1) + archive: Archive | None = None + command: Command | None = None + website: HttpUrl + system_description: HttpUrl + solver_type: SolverType + participations: Participations + + @model_validator(mode="after") + def check_archive(self) -> Submission: + if self.archive is None and not all(p.archive for p in self.participations.root): + raise ValueError("Field archive is needed in all participations if not present at the root") + if self.command is None and not all(p.command for p in self.participations.root): + raise ValueError("Field command is needed in all participations if not present at the root") + return self + + def uniq_id(self) -> str: + return hashlib.sha256(self.name.encode()).hexdigest() + + +default = {"timelimit_s": 60, "memlimit_M": 1024 * 20, "cpuCores": 4} diff --git a/smtcomp/generate_benchmarks.py b/smtcomp/generate_benchmarks.py new file mode 100644 index 00000000..30a38a1b --- /dev/null +++ b/smtcomp/generate_benchmarks.py @@ -0,0 +1,27 @@ +from typing import Set, Dict +from pathlib import Path +from smtcomp import defs + + +def generate_benchmarks(dst: Path) -> None: + dst.joinpath("files").mkdir(parents=True, exist_ok=True) + for track, divisions in defs.tracks.items(): + match track: + case defs.Track.Incremental: + suffix = "_inc" + case defs.Track.ModelValidation: + suffix = "_model" + case defs.Track.SingleQuery: + suffix = "" + case defs.Track.UnsatCore | defs.Track.ProofExhibition | defs.Track.Cloud | defs.Track.Parallel: + continue + for _, theories in divisions.items(): + for theory in theories: + file = dst.joinpath(theory + suffix) + file_sat = dst.joinpath("files", theory + suffix + ".sat.smt2") + file_unsat = dst.joinpath("files", theory + suffix + ".unsat.smt2") + + file.write_text("\n".join([str(file_sat.relative_to(dst)), str(file_unsat.relative_to(dst))])) + + file_sat.write_text(f"(set-logic {theory.value})(check-sat)") + file_unsat.write_text(f"(set-logic {theory.value})(assert false)(check-sat)") diff --git a/smtcomp/main.py b/smtcomp/main.py new file mode 100644 index 
00000000..c6cde553 --- /dev/null +++ b/smtcomp/main.py @@ -0,0 +1,121 @@ +import json +from pathlib import Path +from typing import List +import rich +from rich.progress import track +import typer +from pydantic import ValidationError + +import smtcomp.archive as archive +import smtcomp.benchexec as benchexec +import smtcomp.defs as defs +import smtcomp.submission as submission +from smtcomp.benchmarks import clone_group +from smtcomp.convert_csv import convert_csv as convert_csv_file +import smtcomp.generate_benchmarks + +app = typer.Typer() + + +@app.command() +def show(file: str) -> None: + """ + Show information about a solver submission + """ + s = None + try: + s = submission.read(file) + except Exception as e: + rich.print(f"[red]Error during file parsing of {file}[/red]") + print(e) + exit(1) + if not s: + rich.print(f"[red]Empty submission??? {file}[/red]") + exit(1) + submission.show(s) + + +@app.command() +def validate(file: str) -> None: + """ + Validate a json defining a solver submission + """ + try: + submission.read(file) + except ValidationError as e: + print(e) + exit(1) + + +@app.command() +def convert_csv(file: str, dstdir: Path) -> None: + """ + Convert a csv (old submission format) to json files (new format) + """ + dstdir.mkdir(parents=True, exist_ok=True) + convert_csv_file(Path(file), Path(dstdir)) + + +@app.command() +def dump_json_schema(dst: Path) -> None: + """ + Dump the json schemas used for submissions at the given file + """ + with open(dst, "w") as f: + f.write(json.dumps(defs.Submission.model_json_schema(), indent=2)) + + +@app.command() +def download_benchmarks(dst: Path, dryrun: bool = False) -> None: + """ + Clone or update all the benchmarks used by the SMTCOMP + """ + clone_group("SMT-LIB-benchmarks", dst.joinpath("non-incremental"), dryrun) + clone_group("SMT-LIB-benchmarks-inc", dst.joinpath("incremental"), dryrun) + + +@app.command() +def generate_benchexec( + files: List[Path], + dst: Path, + cachedir: Path, + timelimit_s: int = defs.default["timelimit_s"], + memlimit_M: int = defs.default["memlimit_M"], + cpuCores: int = defs.default["cpuCores"], +) -> None: + """ + Generate the benchexec file for the given submissions + """ + cmdtasks: List[benchexec.CmdTask] = [] + for file in track(files): + s = submission.read(str(file)) + res = benchexec.cmdtask_for_submission(s, cachedir) + cmdtasks.extend(res) + benchexec.generate_xml( + timelimit_s=timelimit_s, memlimit_M=memlimit_M, cpuCores=cpuCores, cmdtasks=cmdtasks, file=dst + ) + + +@app.command() +def download_archive(files: List[Path], dst: Path) -> None: + """ + Download and unpack + """ + for file in track(files): + dst.mkdir(parents=True, exist_ok=True) + s = submission.read(str(file)) + if s.archive: + archive.download(s.archive, dst) + archive.unpack(s.archive, dst) + for p in s.participations.root: + if p.archive: + archive.download(p.archive, dst) + archive.unpack(p.archive, dst) + + +@app.command() +def generate_benchmarks(dst: Path) -> None: + """ + Generate trivial benchmarks for testing + """ + smtcomp.generate_benchmarks.generate_benchmarks(dst) diff --git a/smtcomp/submission.py b/smtcomp/submission.py new file mode 100644 index 00000000..1cd1594b --- /dev/null +++ b/smtcomp/submission.py @@ -0,0 +1,25 @@ +from pathlib import Path + +import rich +from rich.tree import Tree + +from smtcomp.defs import Submission + + +def read(file: str) -> Submission: + return Submission.model_validate_json(Path(file).read_text()) + + +def show(s: Submission) -> None: + tree = 
Tree(f"[bold]{s.name}[/bold]") + tree.add(f"{len(s.contributors)} authors") + tree.add(f"website: {s.website}") + tracks = s.participations.get() + tree_part = tree.add("Participations") + for track, divs in sorted(tracks.items()): + tree_track = tree_part.add(track) + for div, logics in sorted(divs.items()): + tree_div = tree_track.add(div) + for logic in sorted(logics): + tree_div.add(logic) + rich.print(tree) diff --git a/smtcomp/tool.py b/smtcomp/tool.py new file mode 100644 index 00000000..1dd0d647 --- /dev/null +++ b/smtcomp/tool.py @@ -0,0 +1,92 @@ +from typing import List, Optional, Any +import benchexec.util as util +import benchexec.result as result +from benchexec.tools.template import BaseTool2 +import sys, re + +fallback_name = "./false" + + +class Tool(BaseTool2): # type: ignore + """ + Generic tool for smtcomp execution + """ + + def determine_result(self, run: BaseTool2.Run) -> Any: # type: ignore + """Adaptation of Jochen Hoenicke process script + + A simple post-processor for SMT. + The logic is: + - remove success outputs (they're ignored for now) + - take the first line + - all other lines after it ignored + - if the line is sat, unsat, or unknown this is the status + - if no such line exists, the status is unknown. + - all lines after the result are ignored. + """ + + returncode: int = run.exit_code.value + returnsignal: int = run.exit_code.signal + output: List[str] = run.output + isTimeout: bool = run.was_timeout + + if returnsignal is None: + status = None + for line in output: + line = line.strip() + # ignore + if re.compile("^\s*(success|;.*)?\s*$").match(line): + continue + if line == "unsat": + return result.RESULT_FALSE_PROP + elif line == "sat": + return result.RESULT_TRUE_PROP + else: + return result.RESULT_UNKNOWN + return result.RESULT_UNKNOWN + + elif ((returnsignal == 9) or (returnsignal == 15)) and isTimeout: + status = result.RESULT_TIMEOUT + + elif returnsignal == 9: + status = "KILLED BY SIGNAL 9" + elif returnsignal == 6: + status = "ABORTED" + elif returnsignal == 15: + status = "KILLED" + else: + status = f"ERROR ({returncode})" + + return status + + def executable(self, _: Any) -> str | Any | None: + return util.find_executable("smtlib2_trace_executor", fallback=fallback_name, exitOnError=False) + + def version(self, executable: str) -> str: + return "" + + def name(self) -> str: + return "SC" + + def cmdline( # type: ignore + self, + executable: str, + options: List[str], + task: BaseTool2.Task, + rlimits: BaseTool2.ResourceLimits, + ) -> Any: + tasks = task.input_files + options = options + ([] if task.options is None else task.options) + assert len(tasks) <= 1, "only one inputfile supported" + assert len(options) >= 2, "options give the mode and command to run" + cmd = options[0] + options = options[1:] + if cmd == "direct": + return [*options, *tasks] + elif cmd == "trace": + if executable == fallback_name: + sys.exit("benchexec smtcomp tool needs 'smtlib2_trace_executor' for tracing") + else: + return [executable, *options, *tasks] + else: + sys.exit("benchexec smtcomp executor accept only mode direct or trace") diff --git a/smtcomp/unpack.py b/smtcomp/unpack.py new file mode 100644 index 00000000..71bbd371 --- /dev/null +++ b/smtcomp/unpack.py @@ -0,0 +1,32 @@ +# It is hard to find a library to unpack *with permission* +# and securely + +from pathlib import Path +from zipfile import ZipFile +from tarfile import TarFile +from stat import S_IXUSR + +ZIP_UNIX_SYSTEM = 3 + + +def zip_extract_all_with_executable_permission(file: Path, target_dir: 
Path) -> None: + with ZipFile(file, "r") as zf: + for info in zf.infolist(): + extracted_path = Path(zf.extract(info, target_dir)) + + if info.create_system == ZIP_UNIX_SYSTEM and extracted_path.is_file(): + unix_attributes = info.external_attr >> 16 + if unix_attributes & S_IXUSR: + extracted_path.chmod(extracted_path.stat().st_mode | S_IXUSR) + + +def tar_extract_all_with_executable_permission(file: Path, target_dir: Path) -> None: + # TarFile.open (unlike the bare TarFile constructor) transparently handles + # compressed archives such as .tar.gz; the "data" filter rejects unsafe + # members and preserves the owner executable bit. + with TarFile.open(file, "r:*") as tf: + tf.extractall(path=target_dir, filter="data") + + +def extract_all_with_executable_permission(file: Path, target_dir: Path) -> None: + if str(file).endswith(".zip"): + zip_extract_all_with_executable_permission(file, target_dir) + else: + tar_extract_all_with_executable_permission(file, target_dir) diff --git a/submissions/Readme.md b/submissions/Readme.md new file mode 100644 index 00000000..a83bff88 --- /dev/null +++ b/submissions/Readme.md @@ -0,0 +1,20 @@ +[//]: # "Generated from submissions/template/template.md" + +## Submissions directory + +A new submission is made by adding a new json file to this directory and +submitting a Pull Request. + +It can also be done directly from the web interface, starting from a template: +[create a new submission](https://github.com/SMT-COMP/smt-comp.github.io/new/new_submission/submissions?value=%7B%0A%20%20%20%20%22name%22%3A%20%22%3Csolver%20name%3E%22%2C%0A%20%20%20%20%22contributors%22%3A%20%5B%0A%20%20%20%20%20%20%20%20%22First%20Smith%22%2C%0A%20%20%20%20%20%20%20%20%7B%20%22name%22%3A%20%22Second%20Baker%22%2C%20%22website%22%3A%20%22http%3A%2F%2Fbaker.com%2F%22%20%7D%0A%20%20%20%20%5D%2C%0A%20%20%20%20%22contacts%22%3A%20%5B%22contact%20name%20%3Ccontact%40email.com%3E%22%5D%2C%0A%20%20%20%20%22archive%22%3A%20%7B%0A%20%20%20%20%20%20%20%20%22url%22%3A%20%22http%3A%2F%2Fexample.com%2Fsolver.tar.gz%22%2C%0A%20%20%20%20%20%20%20%20%22h%22%3A%20%7B%20%22sha256%22%3A%20%22012345%22%20%7D%0A%20%20%20%20%7D%2C%0A%20%20%20%20%22website%22%3A%20%22http%3A%2F%2Fexample.com%2F%22%2C%0A%20%20%20%20%22system_description%22%3A%20%22http%3A%2F%2Fexample.com%2Fsystem.pdf%22%2C%0A%20%20%20%20%22command%22%3A%20%5B%22relative_cmd%22%2C%20%22default_command_line%22%5D%2C%0A%20%20%20%20%22solver_type%22%3A%20%22Standalone%22%2C%0A%20%20%20%20%22participations%22%3A%20%5B%0A%20%20%20%20%20%20%20%20%7B%20%22tracks%22%3A%20%5B%22SingleQuery%22%5D%2C%20%22divisions%22%3A%20%5B%22Equality%22%5D%20%7D%2C%0A%20%20%20%20%20%20%20%20%7B%0A%20%20%20%20%20%20%20%20%20%20%20%20%22tracks%22%3A%20%5B%22SingleQuery%22%5D%2C%0A%20%20%20%20%20%20%20%20%20%20%20%20%22logics%22%3A%20%22QF_.%2ALRA.%2A%22%2C%0A%20%20%20%20%20%20%20%20%20%20%20%20%22command%22%3A%20%5B%22relative_cmd%22%2C%20%22other_option%22%5D%0A%20%20%20%20%20%20%20%20%7D%2C%0A%20%20%20%20%20%20%20%20%7B%0A%20%20%20%20%20%20%20%20%20%20%20%20%22tracks%22%3A%20%5B%22SingleQuery%22%5D%2C%0A%20%20%20%20%20%20%20%20%20%20%20%20%22logics%22%3A%20%5B%22LIA%22%5D%2C%0A%20%20%20%20%20%20%20%20%20%20%20%20%22archive%22%3A%20%7B%20%22url%22%3A%20%22http%3A%2F%2Fexample.com%2Fsolver_lia.tar.gz%22%20%7D%2C%0A%20%20%20%20%20%20%20%20%20%20%20%20%22command%22%3A%20%5B%22relative_cmd%22%2C%20%22--super-lia%22%5D%0A%20%20%20%20%20%20%20%20%7D%0A%20%20%20%20%5D%0A%7D%0A) + +- The filename should start with the name of your solver and end with `.json` +- The continuous integration will check the format + +### Fields + +- `name`: The solver name should respect the guidelines given in the + rules of the SMT-competition 
(derived solver, wrapper solver, ...) +- `authors`: (can UTF-8 be used? it should be checked; a tuple with Latin-1?) +- `url`: The url should be valid at the time of submission and during the + whole competition. Do we require zenodo for the final version? diff --git a/submissions/template/generate_Readme.py b/submissions/template/generate_Readme.py new file mode 100644 index 00000000..01248481 --- /dev/null +++ b/submissions/template/generate_Readme.py @@ -0,0 +1,45 @@ +from pathlib import Path +from string import Template +from urllib.parse import quote + +import rich +import typer + +app = typer.Typer() + + +def substitute() -> str: + tdir = Path("submissions").joinpath("template") + + json = tdir.joinpath("template.json").read_text() + json = quote(json, safe="") + + src = tdir.joinpath("template.md") + src = Template(src.read_text()) + + result = '[//]: # "Generated from submissions/template/template.md"\n\n' + result += src.safe_substitute({"value": json}) + return result + + +dst = Path("submissions").joinpath("Readme.md") + + +@app.command() +def generate() -> None: + dst.write_text(substitute()) + + +@app.command() +def check() -> None: + current = dst.read_text() + oracle = substitute() + if current == oracle: + rich.print(":white_check_mark: submissions/Readme.md is up to date") + exit(0) + else: + rich.print(":heavy_exclamation_mark: submissions/Readme.md is obsolete!") + exit(1) + + +if __name__ == "__main__": + app() diff --git a/submissions/template/template.json b/submissions/template/template.json new file mode 100644 index 00000000..3a123c96 --- /dev/null +++ b/submissions/template/template.json @@ -0,0 +1,30 @@ +{ + "name": "<solver name>", + "contributors": [ + "First Smith", + { "name": "Second Baker", "website": "http://baker.com/" } + ], + "contacts": ["contact name <contact@email.com>"], + "archive": { + "url": "http://example.com/solver.tar.gz", + "h": { "sha256": "012345" } + }, + "website": "http://example.com/", + "system_description": "http://example.com/system.pdf", + "command": ["relative_cmd", "default_command_line"], + "solver_type": "Standalone", + "participations": [ + { "tracks": ["SingleQuery"], "divisions": ["Equality"] }, + { + "tracks": ["SingleQuery"], + "logics": "QF_.*LRA.*", + "command": ["relative_cmd", "other_option"] + }, + { + "tracks": ["SingleQuery"], + "logics": ["LIA"], + "archive": { "url": "http://example.com/solver_lia.tar.gz" }, + "command": ["relative_cmd", "--super-lia"] + } + ] +} diff --git a/submissions/template/template.md b/submissions/template/template.md new file mode 100644 index 00000000..c6133dc9 --- /dev/null +++ b/submissions/template/template.md @@ -0,0 +1,18 @@ +## Submissions directory + +A new submission is made by adding a new json file to this directory and +submitting a Pull Request. + +It can also be done directly from the web interface, starting from a template: +[create a new submission](https://github.com/SMT-COMP/smt-comp.github.io/new/new_submission/submissions?value=$value) + +- The filename should start with the name of your solver and end with `.json` +- The continuous integration will check the format + +### Fields + +- `name`: The solver name should respect the guidelines given in the + rules of the SMT-competition (derived solver, wrapper solver, ...) +- `authors`: (can UTF-8 be used? it should be checked; a tuple with Latin-1?) +- `url`: The url should be valid at the time of submission and during the + whole competition. Do we require zenodo for the final version? 
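For orientation, here is a minimal sketch of how a file created from this template is consumed by the tooling added above (an illustration only: it assumes the `smtcomp` package from this patch is installed and that it is run from the repository root):

```python
# Read and display a submission the same way the `smtcomp show` command does.
# Assumes the smtcomp package from this patch is installed and the working
# directory is the repository root.
from smtcomp import submission

s = submission.read("submissions/template/template.json")  # pydantic validation happens here
submission.show(s)  # prints the participations as a track/division/logic tree
```

The same check is available from the command line as `poetry run smtcomp show submissions/template/template.json`, or `poetry run smtcomp validate` to validate without printing the tree.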
diff --git a/tests/solvers_divisions_final.csv b/tests/solvers_divisions_final.csv new file mode 100644 index 00000000..0ce5063d --- /dev/null +++ b/tests/solvers_divisions_final.csv @@ -0,0 +1,26 @@ +Preliminary Solver ID,Solver ID,Config ID Single Query,Config ID Incremental,Config ID Model Validation,Config ID Unsat Core,Config ID Proof Exhibition,Solver Name,Solver homepage,System description URL,System description name,Competing,Single Query Regex,Incremental Regex,Model Validation Regex,Unsat Core Regex,Proof Exhibition Regex,Cloud Regex,Parallel Regex,Single Query Track,Incremental Track,Model Validation Track,Unsat Core Track,Proof Exhibition Track,Cloud Track,Parallel Track,Variant Of,Wrapper Tool,Derived Tool,Contact,Team Members,Seed +44384,44384,741775,,,,,"COLIBRI","http://colibri.frama-c.com","https://drive.google.com/uc?export=download&id=1FZtsbl5hSVegkfPUuHC5vs7IDgFwsn8Z","COLIBRI: SMT solving with CP",yes,"^((QF_)(AX?)?(UF)?(BV)?(FP)(DT)?S?([LN][IR]*A|[IR]DL)?)$","-","-","-","-","-","-",QF_ABVFP;QF_ABVFPLRA;QF_AUFBVFP;QF_BVFP;QF_BVFPLRA;QF_FP;QF_FPLRA;QF_UFFP,,,,,,,"","","",bruno.marre@cea.fr,"Bruno Marre, François Bobot, Zakaria Chihani",413 +44702(sq);44703(mv),44702;44703(mv),741780,,741781,,,"Z3++","https://z3-plus-plus.github.io/","https://github.com/z3-plus-plus/z3-plus-plus.github.io/blob/main/z3%2B%2B_at_smt_comp_2023.pdf","Z3++ at SMT-COMP 2023",yes,"^(QF_IDL|QF_LIA|QF_NIA|QF_NRA)$","-","^(QF_IDL|QF_LIA|QF_NIA|QF_NRA)$","-","-","-","-",QF_IDL;QF_LIA;QF_NIA;QF_NRA,,QF_IDL;QF_LIA;QF_NIA;QF_NRA,,,,,"","","Z3-4-8-15",libohan19@mails.ucas.ac.cn,"Shaowei Cai, Bohan Li, Bohua Zhan, Xindi Zhang, and Mengyu Zhao",1 +44411,44765,741806,,741806,,,"SMT-RAT-MCSAT","https://ths-rwth.github.io/smtrat/","https://raw.githubusercontent.com/ths-rwth/smtrat/master/doc/smtcomp-description/smtcomp-2023.pdf","SMT-RAT 23.05",yes,"QF_NRA","-","QF_NRA","-","-","-","-",QF_NRA,,QF_NRA,,,,,"","","",nalbach@cs.rwth-aachen.de,"Jasper Nalbach, Valentin Promies, Erika Ábrahám, Philip Kroll",48738 +44420,44767,741807,,,,,"OSTRICH","https://github.com/uuverifiers/ostrich","https://philipp.ruemmer.org/ostrich-2023.pdf","OSTRICH Version 1.3",yes,"^(QF_S|QF_SLIA|QF_SNIA)$","-","-","-","-","-","-",QF_S;QF_SLIA;QF_SNIA,,,,,,,"","","OSTRICH is based on the SMT solver Princess (version 2023-05-27)",philipp.ruemmer@gmail.com,"Taolue Chen Riccardo De Masellis Alejandro Flores-Lamas Matthew Hague Zhilei Han Denghang Hu Shuanglong Kan Anthony W. 
Lin Oliver Markgraf Philipp Rümmer Amanda Stjerna Zhilin Wu",67223 +44672,44672,,,741778,,,"ismt","https://github.com/MRVAPOR/Yices-ismt","https://github.com/MRVAPOR/Yices-ismt-description","ISMT for SMT-COMP 2023",yes,"-","-","^(QF_NIA)$","-","-","-","-",,,QF_NIA,,,,,"","","",jiafq@ios.ac.cn,"Fuqi Jia, Rui Han, Minghao Liu, Cunjing Ge, Pei Huang, Feifei Ma, Jian Zhang.",20230512 +44713,44713,741783,,,,,"yices-ismt","https://github.com/MRVAPOR/Yices-ismt","https://github.com/MRVAPOR/Yices-ismt-description","Yices-ISMT for SMT-COMP 2023",yes,"^(QF_NIA)$","-","-","-","-","-","-",QF_NIA,,,,,,,"","Yices2 2.6.2","",jiafq@ios.ac.cn,"Fuqi Jia, Rui Han, Minghao Liu, Cunjing Ge, Pei Huang, Feifei Ma, Jian Zhang.", +44469,44741;44742(mv),741792,,741793,,,"cvc5-NRA-LS","https://github.com/minghao-liu/NRA-LS","https://github.com/minghao-liu/NRA-LS/blob/main/system_description_2023.pdf","NRA-LS at the SMT Competition 2023",yes,"^(QF_NRA)$","-","^(QF_NRA)$","-","-","-","-",QF_NRA,,QF_NRA,,,,,"","cvc5-1.0.5","",liumh@ios.ac.cn,"Minghao Liu, Kunhang Lv, Fuqi Jia, Rui Han, Yu Zhang, Pei Huang, Feifei Ma, Jian Zhang",99 +44471,44715,741784,,,,,"Z3-Owl","https://github.com/z3-owl/z3-owl.github.io","https://github.com/z3-owl/z3-owl.github.io/blob/main/Z3owl_SMT_COMP_2023.pdf","Z3-Owl at SMT-COMP 2023",yes,"^(QF_BV|QF_UFBV|QF_ABV|QF_AUFBV|QF_FP|QF_BVFP)$","-","-","-","-","-","^QF_BV$",QF_ABV;QF_AUFBV;QF_BV;QF_BVFP;QF_FP;QF_UFBV,,,,,,QF_BV,"","","Z3(version 4.8.11)",jasonj@zju.edu.cn,"Xinkai Ma,Jiahui Sun, Siyuan Zhu, Peisen Yao, Rui Chang, Yongwang Zhao, Wensheng Tang, and Charles Zhang",8 +44719,44759,741799,742173,741799,741799,742175,"SMTInterpol","https://ultimate.informatik.uni-freiburg.de/smtinterpol","https://ultimate.informatik.uni-freiburg.de/smtinterpol/sysdesc2023.pdf","SMTInterpol with resolution proofs",yes,"^((QF_)?(AX?)?(UF)?(DT)?([IR]DL|L[IR]*A)?|QF_(A|UF)+N[IR]*A)$","^((QF_)?(AX?)?(UF)?(DT)?([IR]DL|[NL][IR]*A)?)$","^((QF_)(AX?)?(UF)?(DT)?([IR]DL|L[IR]*A)?)$","^((QF_)?(AX?)?(UF)?(DT)?([IR]DL|L[IR]*A)?|QF_(A|UF)+N[IR]*A)$","^((QF_)?(AX?)?(UF)?(DT)?([IR]DL|L[IR]*A)?|QF_(A|UF)+N[IR]*A)$","-","-",ALIA;AUFDTLIA;AUFDTLIRA;AUFLIA;AUFLIRA;LIA;LRA;QF_ALIA;QF_ANIA;QF_AUFLIA;QF_AUFNIA;QF_IDL;QF_LIA;QF_LIRA;QF_LRA;QF_RDL;QF_UFDTLIA;QF_UFDTLIRA;QF_UFIDL;QF_UFLIA;QF_UFLRA;QF_UFNIA;QF_UFNRA;UFDTLIA;UFDTLIRA;UFIDL;UFLIA;UFLRA,ALIA;ANIA;AUFNIRA;LIA;LRA;QF_ALIA;QF_ANIA;QF_AUFLIA;QF_LIA;QF_LRA;QF_NIA;QF_UF;QF_UFLIA;QF_UFLRA;QF_UFNIA;QF_UFNRA;UF;UFDTNIA;UFLRA;UFNIA;UFNRA,QF_ALIA;QF_AUFLIA;QF_AX;QF_DT;QF_IDL;QF_LIA;QF_LIRA;QF_LRA;QF_RDL;QF_UF;QF_UFDT;QF_UFDTLIA;QF_UFDTLIRA;QF_UFIDL;QF_UFLIA;QF_UFLRA,ALIA;AUFDTLIA;AUFDTLIRA;AUFLIA;AUFLIRA;LIA;LRA;QF_ALIA;QF_ANIA;QF_AUFLIA;QF_AUFNIA;QF_AX;QF_DT;QF_IDL;QF_LIA;QF_LIRA;QF_LRA;QF_RDL;QF_UF;QF_UFDT;QF_UFDTLIA;QF_UFDTLIRA;QF_UFIDL;QF_UFLIA;QF_UFLRA;QF_UFNIA;QF_UFNRA;UF;UFDT;UFDTLIA;UFDTLIRA;UFIDL;UFLIA;UFLRA,ALIA;AUFDTLIA;AUFDTLIRA;AUFLIA;AUFLIRA;LIA;LRA;QF_ALIA;QF_ANIA;QF_AUFLIA;QF_AUFNIA;QF_AX;QF_DT;QF_IDL;QF_LIA;QF_LIRA;QF_LRA;QF_RDL;QF_UF;QF_UFDT;QF_UFDTLIA;QF_UFDTLIRA;QF_UFIDL;QF_UFLIA;QF_UFLRA;QF_UFNIA;QF_UFNRA;UF;UFDT;UFDTLIA;UFDTLIRA;UFIDL;UFLIA;UFLRA,,,"","","",hoenicke@gmail.com,"Jürgen Christ, Daniel Dietsch, Leonard Fichtner, Joanna Greulich, Matthias Heizmann, Jochen Hoenicke, Moritz Mohr, Alexander Nutz, Markus Pomrehn, Pascal Raiola, Tanja Schindler",3021856368 +44483,44756,741798,742163,741798,741798,,"Bitwuzla","https://bitwuzla.github.io/","https://bitwuzla.github.io/data/smtcomp2023/paper.pdf","Bitwuzla at the SMT-COMP 
2023",yes,"^((QF_)?(A)?(BV)?(FP)?(FPLRA)?)$|^((QF_)?(A)?(UF)(BV|FP|BVFP|FPLRA|BVFPLRA))$","^((QF_)?(A)?(BV)?(FP)?(FPLRA)?)$|^((QF_)?(A)?(UF)(BV|FP|BVFP|FPLRA|BVFPLRA))$","^((QF_)?(A)?(BV)?(FP)?(FPLRA)?)$|^((QF_)?(A)?(UF)(BV|FP|BVFP|FPLRA|BVFPLRA))$","^((QF_)?(A)?(BV)?(FP)?(FPLRA)?)$|^((QF_)?(A)?(UF)(BV|FP|BVFP|FPLRA|BVFPLRA))$","-","-","-",ABV;ABVFP;ABVFPLRA;AUFBV;AUFBVFP;BV;BVFP;BVFPLRA;FP;FPLRA;QF_ABV;QF_ABVFP;QF_ABVFPLRA;QF_AUFBV;QF_AUFBVFP;QF_BV;QF_BVFP;QF_BVFPLRA;QF_FP;QF_FPLRA;QF_UFBV;QF_UFFP;UFBV;UFBVFP,ABVFPLRA;BV;BVFP;BVFPLRA;QF_ABV;QF_ABVFP;QF_ABVFPLRA;QF_AUFBV;QF_BV;QF_BVFP;QF_BVFPLRA;QF_FP;QF_UFBV;QF_UFFP,QF_ABV;QF_ABVFP;QF_ABVFPLRA;QF_AUFBV;QF_AUFBVFP;QF_BV;QF_BVFP;QF_BVFPLRA;QF_FP;QF_FPLRA;QF_UFBV;QF_UFFP,ABV;ABVFP;ABVFPLRA;AUFBV;AUFBVFP;BV;BVFP;BVFPLRA;FP;FPLRA;QF_ABV;QF_ABVFP;QF_ABVFPLRA;QF_AUFBV;QF_AUFBVFP;QF_BV;QF_BVFP;QF_BVFPLRA;QF_FP;QF_FPLRA;QF_UFBV;QF_UFFP;UFBV;UFBVFP,,,,"","","","preiner@cs.stanford.edu","Aina Niemetz, Mathias Preiner",42 +44484,44484,741777,,,741777,,"Vampire","https://vprover.github.io/","https://vprover.github.io/reports/smtcomp2023.pdf","Vampire 4.8-SMT System Description",yes,"^(AX?)?(UF)?(DT)?([LN]I?R?A)?([IR]DL)?$","-","-","^(AX?)?(UF)?(DT)?([LN]I?R?A)?([IR]DL)?$","^(AX?)?(UF)?(DT)?([LN]I?R?A)?([IR]DL)?$","^(AX?)?(UF)?(DT)?([LN]I?R?A)?([IR]DL)?$","^(AX?)?(UF)?(DT)?([LN]I?R?A)?([IR]DL)?$",ALIA;ANIA;AUFDTLIA;AUFDTLIRA;AUFDTNIRA;AUFLIA;AUFLIRA;AUFNIA;AUFNIRA;LIA;LRA;NIA;NRA;UF;UFDT;UFDTLIA;UFDTLIRA;UFDTNIA;UFDTNIRA;UFIDL;UFLIA;UFLRA;UFNIA,,,ALIA;ANIA;AUFDTLIA;AUFDTLIRA;AUFDTNIRA;AUFLIA;AUFLIRA;AUFNIA;AUFNIRA;LIA;LRA;NIA;NRA;UF;UFDT;UFDTLIA;UFDTLIRA;UFDTNIA;UFDTNIRA;UFIDL;UFLIA;UFLRA;UFNIA,ALIA;ANIA;AUFDTLIA;AUFDTLIRA;AUFDTNIRA;AUFLIA;AUFLIRA;AUFNIA;AUFNIRA;LIA;LRA;NIA;NRA;UF;UFDT;UFDTLIA;UFDTLIRA;UFDTNIA;UFDTNIRA;UFIDL;UFLIA;UFLRA;UFNIA,ALIA;ANIA;AUFDTLIA;AUFDTLIRA;AUFDTNIRA;AUFLIA;AUFLIRA;AUFNIA;AUFNIRA;LIA;LRA;NIA;NRA;UF;UFDT;UFDTLIA;UFDTLIRA;UFDTNIA;UFDTNIRA;UFIDL;UFLIA;UFLRA;UFNIA,ALIA;ANIA;AUFDTLIA;AUFDTLIRA;AUFDTNIRA;AUFLIA;AUFLIRA;AUFNIA;AUFNIRA;LIA;LRA;NIA;NRA;UF;UFDT;UFDTLIA;UFDTLIRA;UFDTNIA;UFDTNIRA;UFIDL;UFLIA;UFLRA;UFNIA,"","Z3 4.9.1","","giles.reger@manchester.ac.uk","Giles Reger, Martin Suda, Andrei Voronkov, Laura Kovacs, Ahmed Bhayat, Bernhard Gleiss, Marton Hajdu, Petra Hozzova, Jakob Rath, Michael Rawson, Johannes Schoisswohl",6586 +44479;44480(inc);44481(mv),44479;44751(inc);44755(mv),741776,742180,741797,741776,,"Yices2","https://yices.csl.sri.com","https://ahmed-irfan.github.io/sys-desc/yices2-smtcomp-2023.pdf","Yices 2 in SMT-COMP 2023",yes,"^((QF_){1}(AX?)?(UF)?(BV)?([LN][IR]*A|[IR]DL)?|UF)$","^((QF_){1}(AX?)?(UF)?(BV)?([LN][IR]*A|[IR]DL)?|UF)$","^((QF_){1}(AX?)?(UF)?(BV)?([LN][IR]*A|[IR]DL)?|UF)$","^((QF_){1}(AX?)?(UF)?(BV)?([L][IR]*A|[IR]DL)?|UF)$","^$","^$","^$",QF_ABV;QF_ALIA;QF_ANIA;QF_AUFBV;QF_AUFLIA;QF_AUFNIA;QF_AX;QF_BV;QF_IDL;QF_LIA;QF_LIRA;QF_LRA;QF_NIA;QF_NIRA;QF_NRA;QF_RDL;QF_UF;QF_UFBV;QF_UFIDL;QF_UFLIA;QF_UFLRA;QF_UFNIA;QF_UFNRA;UF,QF_ABV;QF_ALIA;QF_ANIA;QF_AUFBV;QF_AUFBVLIA;QF_AUFBVNIA;QF_AUFLIA;QF_BV;QF_LIA;QF_LRA;QF_NIA;QF_UF;QF_UFBV;QF_UFBVLIA;QF_UFLIA;QF_UFLRA;QF_UFNIA;QF_UFNRA;UF,QF_ABV;QF_ALIA;QF_ANIA;QF_AUFBV;QF_AUFLIA;QF_AUFNIA;QF_AX;QF_BV;QF_IDL;QF_LIA;QF_LIRA;QF_LRA;QF_NIA;QF_NIRA;QF_NRA;QF_RDL;QF_UF;QF_UFBV;QF_UFIDL;QF_UFLIA;QF_UFLRA;QF_UFNIA;QF_UFNRA,QF_ABV;QF_ALIA;QF_AUFBV;QF_AUFLIA;QF_AX;QF_BV;QF_IDL;QF_LIA;QF_LIRA;QF_LRA;QF_RDL;QF_UF;QF_UFBV;QF_UFIDL;QF_UFLIA;QF_UFLRA;UF,,,,"","","","ahmed.irfan@sri.com","Bruno Dutertre, Aman Goel, St ́ephane Graham-Lengrand, Ahmed Irfan, Dejan Jovanovi ́c, Ian 
A. Mason",0 +39111,39111,741773,,,,,"YicesQS","https://github.com/disteph/yicesQS","http://www.csl.sri.com/users/sgl/Work/Reports/2023-yicesQS.pdf","YicesQS 2023, an extension of Yices for quantified satisfiability",yes,"^(LRA|NRA|LIA|NIA|BV)$","-","-","-","-","-","-",BV;LIA;LRA;NIA;NRA,,,,,,,"","","Yices2","stephane.graham-lengrand@csl.sri.com","Stephane Graham-Lengrand",0 +41385,41385,741774,742176,741774,,,"STP","https://stp.github.io/","https://github.com/stp/docs/tree/master/smt2023-descr/descr.pdf","STP 2023",yes,"QF_BV","QF_BV","QF_BV","-","-","-","-",QF_BV,QF_BV,QF_BV,,,,,"","","","trev_abroad@yahoo.com","Various",16 +44489,44761,741804,741803,,741804,,"UltimateEliminator+MathSAT","https://www.ultimate-pa.org/?ui=tool&tool=eliminator","https://www.informatik.uni-freiburg.de/~heizmann/tmp/2023UltimateEliminator.pdf","Ultimate Eliminator at SMT-COMP 2023",yes,"^((AX?)?(UF)?(BV)?(FP)?S?([LN][IR]*A|[IR]DL)?)$","^((AX?)?(UF)?(BV)?(FP)?S?([LN][IR]*A|[IR]DL)?)$","^$","^((AX?)?(UF)?(BV)?(FP)?S?([LN][IR]*A|[IR]DL)?)$","^$","^$","^$",ABV;ABVFP;ABVFPLRA;ALIA;ANIA;AUFBV;AUFBVFP;AUFLIA;AUFLIRA;AUFNIA;AUFNIRA;BV;BVFP;BVFPLRA;FP;FPLRA;LIA;LRA;NIA;NRA;UF;UFBV;UFBVFP;UFBVLIA;UFIDL;UFLIA;UFLRA;UFNIA,ABVFPLRA;ALIA;ANIA;AUFNIRA;BV;BVFP;BVFPLRA;LIA;LRA;UF;UFLRA;UFNIA;UFNRA,,ABV;ABVFP;ABVFPLRA;ALIA;ANIA;AUFBV;AUFBVFP;AUFLIA;AUFLIRA;AUFNIA;AUFNIRA;BV;BVFP;BVFPLRA;FP;FPLRA;LIA;LRA;NIA;NRA;UF;UFBV;UFBVFP;UFBVLIA;UFIDL;UFLIA;UFLRA;UFNIA,,,,"","MathSAT-5.6.9","","heizmann@informatik.uni-freiburg.de","TBA",0 +36885,-1,,,,,,"Q3B-pBDD","https://www.fi.muni.cz/~xpavlik5/Q3B-pBDD/","https://www.fi.muni.cz/~xpavlik5/Q3B-pBDD/SMTCOMP23_description.pdf","Q3B-pBDD in SMT Competition 2023",no,"-","-","-","-","-","-","-",,,,,,,,"Q3B","","","469088@mail.muni.cz","Matěj Pavlík, Martin Jonáš, Jan Strejček",78549 +39062,44707,741782,,,,,"Q3B","https://github.com/martinjonas/q3b","https://www.fi.muni.cz/~xjonas/q3b_smtcomp23.pdf","Q3B in SMT Competition 2023",yes,"^BV$","-","-","-","-","-","-",BV,,,,,,,"","","","martin.jonas@mail.muni.cz","Martin Jonáš, Jan Strejček, Jakub Szymsza",2353490582350 +44407,44716,741785,,,,,"Z3-Noodler","https://github.com/VeriFIT/z3-noodler","http://www.fit.vutbr.cz/~holik/pub/z3-noodler_sysdecr_2023.pdf","Z3-Noodler, System description for SMT-comp 2023",yes,"QF_S|QF_SLIA","-","-","-","-","-","-",QF_S;QF_SLIA,,,,,,,"","","Z3","holik@fit.vutbr.cz","Vojťech Havlena, Juraj Síč, David Chocholatý, Ondřej Lengál, Lukáš Holík, Yu-Fang Chen",6590 +44516;44517(inc),44737;44738(inc),741788,742168,741787,741789,742165,"cvc5","https://cvc5.github.io","https://homepage.cs.uiowa.edu/~hschrr/cvc5.pdf","cvc5 at the SMT Competition 
2023",yes,"^.*$","^.*$","^.*$","^.*$","^.*$","-","-",ABV;ABVFP;ABVFPLRA;ALIA;ANIA;AUFBV;AUFBVDTLIA;AUFBVDTNIA;AUFBVDTNIRA;AUFBVFP;AUFDTLIA;AUFDTLIRA;AUFDTNIRA;AUFFPDTNIRA;AUFLIA;AUFLIRA;AUFNIA;AUFNIRA;BV;BVFP;BVFPLRA;FP;FPLRA;LIA;LRA;NIA;NRA;QF_ABV;QF_ABVFP;QF_ABVFPLRA;QF_ALIA;QF_ANIA;QF_AUFBV;QF_AUFBVFP;QF_AUFLIA;QF_AUFNIA;QF_AX;QF_BV;QF_BVFP;QF_BVFPLRA;QF_DT;QF_FP;QF_FPLRA;QF_IDL;QF_LIA;QF_LIRA;QF_LRA;QF_NIA;QF_NIRA;QF_NRA;QF_RDL;QF_S;QF_SLIA;QF_SNIA;QF_UF;QF_UFBV;QF_UFBVDT;QF_UFDT;QF_UFDTLIA;QF_UFDTLIRA;QF_UFDTNIA;QF_UFFP;QF_UFFPDTNIRA;QF_UFIDL;QF_UFLIA;QF_UFLRA;QF_UFNIA;QF_UFNRA;UF;UFBV;UFBVDT;UFBVFP;UFBVLIA;UFDT;UFDTLIA;UFDTLIRA;UFDTNIA;UFDTNIRA;UFFPDTNIRA;UFIDL;UFLIA;UFLRA;UFNIA,ABVFPLRA;ALIA;ANIA;AUFNIRA;BV;BVFP;BVFPLRA;LIA;LRA;QF_ABV;QF_ABVFP;QF_ABVFPLRA;QF_ALIA;QF_ANIA;QF_AUFBV;QF_AUFBVLIA;QF_AUFBVNIA;QF_AUFLIA;QF_BV;QF_BVFP;QF_BVFPLRA;QF_FP;QF_LIA;QF_LRA;QF_NIA;QF_UF;QF_UFBV;QF_UFBVLIA;QF_UFFP;QF_UFLIA;QF_UFLRA;QF_UFNIA;QF_UFNRA;UF;UFDTNIA;UFLRA;UFNIA;UFNRA,QF_ABV;QF_ABVFP;QF_ABVFPLRA;QF_ALIA;QF_ANIA;QF_AUFBV;QF_AUFBVFP;QF_AUFLIA;QF_AUFNIA;QF_AX;QF_BV;QF_BVFP;QF_BVFPLRA;QF_DT;QF_FP;QF_FPLRA;QF_IDL;QF_LIA;QF_LIRA;QF_LRA;QF_NIA;QF_NIRA;QF_NRA;QF_RDL;QF_UF;QF_UFBV;QF_UFFP;QF_UFIDL;QF_UFLIA;QF_UFLRA;QF_UFNIA;QF_UFNRA,ABV;ABVFP;ABVFPLRA;ALIA;ANIA;AUFBV;AUFBVDTLIA;AUFBVDTNIA;AUFBVDTNIRA;AUFBVFP;AUFDTLIA;AUFDTLIRA;AUFDTNIRA;AUFFPDTNIRA;AUFLIA;AUFLIRA;AUFNIA;AUFNIRA;BV;BVFP;BVFPLRA;FP;FPLRA;LIA;LRA;NIA;NRA;QF_ABV;QF_ABVFP;QF_ABVFPLRA;QF_ALIA;QF_ANIA;QF_AUFBV;QF_AUFBVFP;QF_AUFLIA;QF_AUFNIA;QF_AX;QF_BV;QF_BVFP;QF_BVFPLRA;QF_DT;QF_FP;QF_FPLRA;QF_IDL;QF_LIA;QF_LIRA;QF_LRA;QF_NIA;QF_NIRA;QF_NRA;QF_RDL;QF_S;QF_SLIA;QF_SNIA;QF_UF;QF_UFBV;QF_UFBVDT;QF_UFDT;QF_UFDTLIA;QF_UFDTLIRA;QF_UFDTNIA;QF_UFFP;QF_UFFPDTNIRA;QF_UFIDL;QF_UFLIA;QF_UFLRA;QF_UFNIA;QF_UFNRA;UF;UFBV;UFBVDT;UFBVFP;UFBVLIA;UFDT;UFDTLIA;UFDTLIRA;UFDTNIA;UFDTNIRA;UFFPDTNIRA;UFIDL;UFLIA;UFLRA;UFNIA,ABV;ABVFP;ABVFPLRA;ALIA;ANIA;AUFBV;AUFBVDTLIA;AUFBVDTNIA;AUFBVDTNIRA;AUFBVFP;AUFDTLIA;AUFDTLIRA;AUFDTNIRA;AUFFPDTNIRA;AUFLIA;AUFLIRA;AUFNIA;AUFNIRA;BV;BVFP;BVFPLRA;FP;FPLRA;LIA;LRA;NIA;NRA;QF_ABV;QF_ABVFP;QF_ABVFPLRA;QF_ALIA;QF_ANIA;QF_AUFBV;QF_AUFBVFP;QF_AUFLIA;QF_AUFNIA;QF_AX;QF_BV;QF_BVFP;QF_BVFPLRA;QF_DT;QF_FP;QF_FPLRA;QF_IDL;QF_LIA;QF_LIRA;QF_LRA;QF_NIA;QF_NIRA;QF_NRA;QF_RDL;QF_S;QF_SLIA;QF_SNIA;QF_UF;QF_UFBV;QF_UFBVDT;QF_UFDT;QF_UFDTLIA;QF_UFDTLIRA;QF_UFDTNIA;QF_UFFP;QF_UFFPDTNIRA;QF_UFIDL;QF_UFLIA;QF_UFLRA;QF_UFNIA;QF_UFNRA;UF;UFBV;UFBVDT;UFBVFP;UFBVLIA;UFDT;UFDTLIA;UFDTLIRA;UFDTNIA;UFDTNIRA;UFFPDTNIRA;UFIDL;UFLIA;UFLRA;UFNIA,ABV;ABVFP;ABVFPLRA;ALIA;ANIA;AUFBV;AUFBVDTLIA;AUFBVDTNIA;AUFBVDTNIRA;AUFBVFP;AUFDTLIA;AUFDTLIRA;AUFDTNIRA;AUFFPDTNIRA;AUFLIA;AUFLIRA;AUFNIA;AUFNIRA;BV;BVFP;BVFPLRA;FP;FPLRA;LIA;LRA;NIA;NRA;QF_ABV;QF_ABVFP;QF_ABVFPLRA;QF_ALIA;QF_ANIA;QF_AUFBV;QF_AUFBVFP;QF_AUFLIA;QF_AUFNIA;QF_AX;QF_BV;QF_BVFP;QF_BVFPLRA;QF_DT;QF_FP;QF_FPLRA;QF_IDL;QF_LIA;QF_LIRA;QF_LRA;QF_NIA;QF_NIRA;QF_NRA;QF_RDL;QF_S;QF_SLIA;QF_SNIA;QF_UF;QF_UFBV;QF_UFBVDT;QF_UFDT;QF_UFDTLIA;QF_UFDTLIRA;QF_UFDTNIA;QF_UFFP;QF_UFFPDTNIRA;QF_UFIDL;QF_UFLIA;QF_UFLRA;QF_UFNIA;QF_UFNRA;UF;UFBV;UFBVDT;UFBVFP;UFBVLIA;UFDT;UFDTLIA;UFDTLIRA;UFDTNIA;UFDTNIRA;UFFPDTNIRA;UFIDL;UFLIA;UFLRA;UFNIA,ABV;ABVFP;ABVFPLRA;ALIA;ANIA;AUFBV;AUFBVDTLIA;AUFBVDTNIA;AUFBVDTNIRA;AUFBVFP;AUFDTLIA;AUFDTLIRA;AUFDTNIRA;AUFFPDTNIRA;AUFLIA;AUFLIRA;AUFNIA;AUFNIRA;BV;BVFP;BVFPLRA;FP;FPLRA;LIA;LRA;NIA;NRA;QF_ABV;QF_ABVFP;QF_ABVFPLRA;QF_ALIA;QF_ANIA;QF_AUFBV;QF_AUFBVFP;QF_AUFLIA;QF_AUFNIA;QF_AX;QF_BV;QF_BVFP;QF_BVFPLRA;QF_DT;QF_FP;QF_FPLRA;QF_IDL;QF_LIA;QF_LIRA;QF_LRA;QF_NIA;QF_NIRA;QF_NRA;QF_RDL;QF_S;Q
F_SLIA;QF_SNIA;QF_UF;QF_UFBV;QF_UFBVDT;QF_UFDT;QF_UFDTLIA;QF_UFDTLIRA;QF_UFDTNIA;QF_UFFP;QF_UFFPDTNIRA;QF_UFIDL;QF_UFLIA;QF_UFLRA;QF_UFNIA;QF_UFNRA;UF;UFBV;UFBVDT;UFBVFP;UFBVLIA;UFDT;UFDTLIA;UFDTLIRA;UFDTNIA;UFDTNIRA;UFFPDTNIRA;UFIDL;UFLIA;UFLRA;UFNIA,"","","","hansjoerg-schurr@uiowa.edu","Leni Aniva, Haniel Barbosa, Clark Barrett, Martin Brain, Vinícius Camillo, Gereon Kremer, Hanna Lachnitt, Abdalrhman Mohamed, Mudathir Mohamed, Aina Niemetz, Andres Nötzli, Alex Ozdemir, Mathias Preiner, Andrew Reynolds, Ying Sheng, Cesare Tinelli, Amalee Wilson, Yoni Zohar",1965 +44515,44736,,,,,742170,"cvc5-lfsc","https://cvc5.github.io/","https://homepage.cs.uiowa.edu/~hschrr/cvc5.pdf","cvc5 at the SMT Competition 2023",yes,"-","-","-","-","^.*$","-","-",,,,,ABV;ABVFP;ABVFPLRA;ALIA;ANIA;AUFBV;AUFBVDTLIA;AUFBVDTNIA;AUFBVDTNIRA;AUFBVFP;AUFDTLIA;AUFDTLIRA;AUFDTNIRA;AUFFPDTNIRA;AUFLIA;AUFLIRA;AUFNIA;AUFNIRA;BV;BVFP;BVFPLRA;FP;FPLRA;LIA;LRA;NIA;NRA;QF_ABV;QF_ABVFP;QF_ABVFPLRA;QF_ALIA;QF_ANIA;QF_AUFBV;QF_AUFBVFP;QF_AUFLIA;QF_AUFNIA;QF_AX;QF_BV;QF_BVFP;QF_BVFPLRA;QF_DT;QF_FP;QF_FPLRA;QF_IDL;QF_LIA;QF_LIRA;QF_LRA;QF_NIA;QF_NIRA;QF_NRA;QF_RDL;QF_S;QF_SLIA;QF_SNIA;QF_UF;QF_UFBV;QF_UFBVDT;QF_UFDT;QF_UFDTLIA;QF_UFDTLIRA;QF_UFDTNIA;QF_UFFP;QF_UFFPDTNIRA;QF_UFIDL;QF_UFLIA;QF_UFLRA;QF_UFNIA;QF_UFNRA;UF;UFBV;UFBVDT;UFBVFP;UFBVLIA;UFDT;UFDTLIA;UFDTLIRA;UFDTNIA;UFDTNIRA;UFFPDTNIRA;UFIDL;UFLIA;UFLRA;UFNIA,,,"44516","","","hans-joerg@schurr.at","Leni Aniva, Haniel Barbosa, Clark Barrett, Martin Brain, Vinícius Camillo, Gereon Kremer, Hanna Lachnitt, Abdalrhman Mohamed, Mudathir Mohamed, Aina Niemetz, Andres Nötzli, Alex Ozdemir, Mathias Preiner, Andrew Reynolds, Ying Sheng, Cesare Tinelli, Amalee Wilson, Yoni Zohar",52240 +44686,44790,741794,742171,741794,,,"OpenSMT","https://github.com/usi-verification-and-security/opensmt","https://github.com/usi-verification-and-security/opensmt-doc/blob/master/abstract-2023.pdf","The OpenSMT Solver in SMT-COMP 2023",yes,"^QF_(AX|LRA|LIA|UF|UFLRA|UFLIA|RDL|IDL|UFIDL|ALIA|AUFLIA)$","^QF_(AX|LRA|LIA|UF|UFLRA|UFLIA|RDL|IDL|UFIDL|ALIA|AUFLIA)$","^QF_(LRA|LIA|UF|UFLRA|UFLIA|RDL|IDL|UFIDL)$","-","-","-","-",QF_ALIA;QF_AUFLIA;QF_AX;QF_IDL;QF_LIA;QF_LRA;QF_RDL;QF_UF;QF_UFIDL;QF_UFLIA;QF_UFLRA,QF_ALIA;QF_AUFLIA;QF_LIA;QF_LRA;QF_UF;QF_UFLIA;QF_UFLRA,QF_IDL;QF_LIA;QF_LRA;QF_RDL;QF_UF;QF_UFIDL;QF_UFLIA;QF_UFLRA,,,,,"","","","martin.blicha@gmail.com","Martin Blicha, Konstantin I. Britikov, Antti E. J. 
Hyvärinen, Rodrigo Otoni, Natasha Sharygina",9471002 +44678,44678,741779,,741779,,,"Yaga","https://github.com/d3sformal/yaga","https://d3s.mff.cuni.cz/files/research/formal-methods/yaga/abstract-2023.pdf","The Yaga SMT Solver in SMT-COMP 2023",yes,"^QF_LRA$","-","^QF_LRA$","-","-","-","-",QF_LRA,,QF_LRA,,,,,"","","","blicha@d3s.mff.cuni.cz","Drahomír Hanák, Martin Blicha, Jan Kofroň",3621199 +44485,44764,741805,,,,,"z3-alpha","https://z3string.github.io/","https://www.dropbox.com/s/52svozcju5mc755/z3alpha_description.pdf?dl=0","Z3-alpha: a reinforcement learning guided SMT solver",yes,"^(QF_NIA|QF_NRA|QF_S|QF_SLIA|QF_SNIA)$","-","-","-","-","-","-",QF_NIA;QF_NRA;QF_S;QF_SLIA;QF_SNIA,,,,,,,"","","Z3","z52lu@uwaterloo.ca","Zhengyang Lu, Stefan Siemer, Piyush Jha, Florin Manea, Joel Day, and Vijay Ganesh∗",33 +44524,44768,741808,,,,,"iProver","https://gitlab.com/korovin/iprover","http://www.cs.man.ac.uk/~korovink/my_pub/iprover-smt-comp-2023.pdf","iProver v3.8 (SMT-COMP-2023)",yes,"^((AX?)?(UF)?(DT)?([LN][IR]*A|[IR]DL)?)$","-","-","-","-","-","^((AX?)?(UF)?(DT)?([LN][IR]*A|[IR]DL)?)$",ALIA;ANIA;AUFDTLIA;AUFDTLIRA;AUFDTNIRA;AUFLIA;AUFLIRA;AUFNIA;AUFNIRA;LIA;LRA;NIA;NRA;UF;UFDT;UFDTLIA;UFDTLIRA;UFDTNIA;UFDTNIRA;UFIDL;UFLIA;UFLRA;UFNIA,,,,,,ALIA;ANIA;AUFDTLIA;AUFDTLIRA;AUFDTNIRA;AUFLIA;AUFLIRA;AUFNIA;AUFNIRA;LIA;LRA;NIA;NRA;UF;UFDT;UFDTLIA;UFDTLIRA;UFDTNIA;UFDTNIRA;UFIDL;UFLIA;UFLRA;UFNIA,"","Z3 v4.11.2, MiniSAT v2.0, Vampire v4.7","","konstantin.korovin@manchester.ac.uk","Konstantin Korovin, Andre Duarte, Edvard K. Holden",191937 +44525,44760,741802,,,,,"UltimateIntBlastingWrapper+SMTInterpol","https://ultimate-pa.org/","https://www.informatik.uni-freiburg.de/~heizmann/tmp/2023UltimateIntBlastingWrapper.pdf","Ultimate IntBlastingWrapper",yes,"^((QF_)?(AX?)?(UF)?(BV)(DT)?S?([L][IR]*A|[IR]DL)?)$","-","-","-","-","-","-",ABV;AUFBV;AUFBVDTLIA;BV;QF_ABV;QF_AUFBV;QF_BV;QF_UFBV;QF_UFBVDT;UFBV;UFBVDT;UFBVLIA,,,,,,,"","SMTInterpol 2.5-1252-g82eb3a0","","heizmann@informatik.uni-freiburg.de","Max Barth, Matthias Heizmann",4294967296 diff --git a/tests/test1.json b/tests/test1.json new file mode 100644 index 00000000..19f4612a --- /dev/null +++ b/tests/test1.json @@ -0,0 +1,23 @@ +{ + "name": "mysolver", + "contributors": [ + "foo", + { + "name": "bar", + "website": "http://bar.com/" + } + ], + "contacts": ["foo bar "], + "archive": { + "url": "http://example.com/solver.tar.gz", + "h": { "sha256": "012345" } + }, + "website": "http://example.com/", + "system_description": "http://example.com/system.pdf", + "command": ["foo", "--super-fast"], + "solver_type": "Standalone", + "participations": [ + { "tracks": ["SingleQuery"], "divisions": ["QF_Strings"] }, + { "tracks": ["SingleQuery"], "logics": "QF_.*" } + ] +} diff --git a/tests/test_bad.json b/tests/test_bad.json new file mode 100644 index 00000000..7c1fb0f8 --- /dev/null +++ b/tests/test_bad.json @@ -0,0 +1 @@ +{ "name": "missing field" } diff --git a/tests/test_validate.py b/tests/test_validate.py new file mode 100644 index 00000000..747956a7 --- /dev/null +++ b/tests/test_validate.py @@ -0,0 +1,42 @@ +from os import path +from pathlib import Path + +import pytest +from typer.testing import CliRunner + +from smtcomp.convert_csv import convert_csv +from smtcomp.main import app +from smtcomp.submission import read + +runner = CliRunner() +good_cases = ["tests/test1.json", "submissions/template/template.json"] +bad_cases = ["test_bad.json"] + + +@pytest.mark.parametrize("name", good_cases) +def test_good_json(name: str) -> None: + result = 
runner.invoke(app, ["validate", name]) + assert result.stdout == "" + assert result.exit_code == 0 + + +@pytest.mark.parametrize("name", bad_cases) +def test_bad_json(name: str) -> None: + result = runner.invoke(app, ["validate", path.join("tests", name)]) + assert result.exit_code == 1 + + +submissions = list(Path("submissions").glob("*.json")) + + +@pytest.mark.parametrize("submission", submissions) +def test_submission(submission: str) -> None: + read(submission) + + +csv = ["tests/SMT-COMP 2023 System Registration.csv"] + + +@pytest.mark.parametrize("csv", csv) +def test_csv(csv: str, tmp_path: Path) -> None: + convert_csv(Path(csv), tmp_path) diff --git a/tox.ini b/tox.ini new file mode 100644 index 00000000..a44a21bd --- /dev/null +++ b/tox.ini @@ -0,0 +1,18 @@ +[tox] +skipsdist = true +envlist = py38, py39, py310, py311 + +[gh-actions] +python = + 3.8: py38 + 3.9: py39 + 3.10: py310 + 3.11: py311 + +[testenv] +passenv = PYTHON_VERSION +allowlist_externals = poetry +commands = + poetry install -v + pytest --doctest-modules tests --cov --cov-config=pyproject.toml --cov-report=xml + mypy diff --git a/web/content/_index.md b/web/content/_index.md index f943d679..5c1db6f2 100644 --- a/web/content/_index.md +++ b/web/content/_index.md @@ -3,8 +3,7 @@ layout = 'single' title = 'SMT-COMP 2024' +++ -The *19th International Satisfiability Modulo Theories Competition (SMT-COMP -2024)* is part of the +The _19th International Satisfiability Modulo Theories Competition (SMT-COMP 2024)_ is part of the [SMT Workshop 2024](https://smt-workshop.cs.uiowa.edu/2024), affiliated with [CAV-36](https://i-cav.org/2024). The SMT Workshop will include a block of time to @@ -23,6 +22,7 @@ present the competitors and results of the competition. - **July 22–23** SMT Workshop (presentation of results) ### Organizers + - [Martin Bromberger](https://www.mpi-inf.mpg.de/departments/automation-of-logic/people/martin-bromberger) (chair) - MPI für Informatik, Germany - [François Bobot](https://github.com/bobot) - CEA List, France - [Martin Jonáš](https://fi.muni.cz/~xjonas) - Masaryk University, Czechia @@ -31,6 +31,7 @@ SMT-COMP 2024 is organized under the direction of the SMT Steering Committee. ### Acknowledgment + - [Dirk Beyer](https://www.sosy-lab.org/people/beyer/) (VerifierCloud) - [Aaron Stump](http://homepage.divms.uiowa.edu/~astump/) (StarExec) - [Clark Barrett](http://theory.stanford.edu/~barrett/), @@ -38,5 +39,5 @@ Committee. [Aina Niemetz](https://cs.stanford.edu/~niemetz/), [Mathias Preiner](https://cs.stanford.edu/~preiner/), and [Hans-Jörg Schurr](https://team.inria.fr/veridis/schurr/) -([smt-lib](http://smtlib.cs.uiowa.edu/index.shtml) maintenance) + ([smt-lib](http://smtlib.cs.uiowa.edu/index.shtml) maintenance) - Jonathan Eidelman and Mike Whalen (Cloud and Parallel Tracks AWS Support) diff --git a/web/content/benchmark_submission/index.md b/web/content/benchmark_submission/index.md index b27ab4ef..cb0f18f5 100644 --- a/web/content/benchmark_submission/index.md +++ b/web/content/benchmark_submission/index.md @@ -19,8 +19,9 @@ If you have any questions please contact one of the SMT-LIB maintainers the same as the ones organizing the competition. 1. 
The SMT-LIB benchmark collection is co-managed by - - [Clark Barrett](http://www.cs.stanford.edu/~barrett), [barrett@cs.stanford.edu](mailto:barrett@cs.stanford.edu) - - [Pascal Fontaine](https://members.loria.fr/PFontaine/), [pascal.fontaine@uliege.be](mailto:pascal.fontaine@uliege.be) - - [Aina Niemetz](https://cs.stanford.edu/~niemetz/), [niemetz@cs.stanford.edu](mailto:niemetz@cs.stanford.edu) - - [Mathias Preiner](https://cs.stanford.edu/~preiner/), [preiner@cs.stanford.edu](mailto:preiner@cs.stanford.edu) - - [Hans-Jörg Schurr](https://schurr.io/), [hans-jorg.schurr@inria.fr](mailto:hans-jorg.schurr@inria.fr) + +- [Clark Barrett](http://www.cs.stanford.edu/~barrett), [barrett@cs.stanford.edu](mailto:barrett@cs.stanford.edu) +- [Pascal Fontaine](https://members.loria.fr/PFontaine/), [pascal.fontaine@uliege.be](mailto:pascal.fontaine@uliege.be) +- [Aina Niemetz](https://cs.stanford.edu/~niemetz/), [niemetz@cs.stanford.edu](mailto:niemetz@cs.stanford.edu) +- [Mathias Preiner](https://cs.stanford.edu/~preiner/), [preiner@cs.stanford.edu](mailto:preiner@cs.stanford.edu) +- [Hans-Jörg Schurr](https://schurr.io/), [hans-jorg.schurr@inria.fr](mailto:hans-jorg.schurr@inria.fr) diff --git a/web/content/introduction/index.md b/web/content/introduction/index.md index 503cace4..5402b08f 100644 --- a/web/content/introduction/index.md +++ b/web/content/introduction/index.md @@ -5,6 +5,7 @@ draft = false +++ ### Decision procedures. + Decision procedures for Satisfiability Modulo Theories (SMT) are of continuing interest for many verification applications. SMT solvers are typically used for verification as backends: a verification problem or @@ -13,12 +14,13 @@ solver. The solver then attempts to report satisfiability or unsatisfiability of the formula. The advantage SMT solvers are usually considered to have over pure SAT solvers, which are also often used as verification backends (e.g., for bounded model checking), is the higher -level of abstraction at which they can operate. By implementing +level of abstraction at which they can operate. By implementing theories like arithmetic, arrays, and uninterpreted functions directly, SMT solvers have the promise to provide higher performance than SAT solvers working on encodings of such structures to the bit level. ### Standard formats. + The additional promise of SMT over pure SAT is balanced by additional challenges. Since SMT deals with first-order (most commonly quantifier-free) formulas instead of purely propositional ones, creation @@ -35,6 +37,7 @@ tool), common output formats for objects like proofs and models are also necessary for the adoption of SMT. ### Competition. + The Satisfiability Modulo Theories Competition (SMT-COMP) arose from the [SMT-LIB](https://smtlib.cs.uiowa.edu/) (Satisfiability Modulo Theories Library) initiative to spur adoption of the common, community-designed SMT-LIB formats, @@ -47,7 +50,7 @@ The first SMT-COMP was held in 2005 as a satellite event of the [17th International Conference on Computer-Aided Verification (CAV 2005)](http://www.cav2005.inf.ed.ac.uk/). The experience with SMT-COMP 2005 confirmed the community's expectations that a public competition would indeed motivate implementors of SMT solvers to adopt -the common SMT-LIB input format. [Subsequent SMT-COMPs](previous.html) have +the common SMT-LIB input format. 
[Subsequent SMT-COMPs](previous.html) have provided further evidence that such a competition can stimulate improvement in solver implementations: solvers entered in each competition have improved significantly over those in previous competitions. diff --git a/web/content/model/index.md b/web/content/model/index.md index 5fd939e6..b595acfd 100644 --- a/web/content/model/index.md +++ b/web/content/model/index.md @@ -15,20 +15,20 @@ SMTCOMP 2023. ## Partial functions Some theory functions are only partially defined, e.g., division by -zero. The SMT semantics states that a benchmark is sat if there is an +zero. The SMT semantics states that a benchmark is sat if there is an extension of these partial functions to a total function, i.e., the -value of the function can be chosen by the solver. This begs the +value of the function can be chosen by the solver. This begs the questions how a solver should describe the model it chose. We propose that solvers should give the values similar as for -uninterpreted functions using `define-fun`. The given function definition +uninterpreted functions using `define-fun`. The given function definition must coincide on the defined inputs with the values given by the -theory. To achieve this, the function definition may call the original theory +theory. To achieve this, the function definition may call the original theory function using the same name as the function that is defined. In the model validation track, solvers MUST give the values for an undefined input, if it affects the satisfiability of the benchmark. -A solver SHOULD give a concrete value for all undefined inputs. A +A solver SHOULD give a concrete value for all undefined inputs. A simple way to achieve this is with an `ite` expression that checks if the input leads to undefinedness and provides a concrete expression in that case and calls the original theory function in the other case. @@ -77,46 +77,49 @@ represented algebraic numbers. For the polynomial there are two main choices: -* Any polynomial with rational coefficients -* The unique reduced minimal polynomial with integer coefficients +- Any polynomial with rational coefficients +- The unique reduced minimal polynomial with integer coefficients For specifying the roots: -* An enclosure where that contains a unique root of the polynomial -* A rational which is the closest to a unique root -* The number of the root in a specific ordering. +- An enclosure where that contains a unique root of the polynomial +- A rational which is the closest to a unique root +- The number of the root in a specific ordering. The ordering can be smallest to greatest however it doesn't extend to complex values. Moreover, multiplicities must be taken into account. However, it offers the possibility to have a unique representation for an algebraic number. We identified two opposing objectives: + - Uniqueness of the representation - Human readability -The first objective allows checking syntactically which values are equal in a model. The human readability means that a human can get an understanding of the value of the number just by reading the value. We propose two ways to write values of algebraic numbers that corresponding to one objective each `root-of-with-ordering` and `root-of-with-interval`. In both case the polynomial with integer coefficient is represented by the list of coefficients in ascending order, i.e., from the coefficient of the constant term to the leading coefficient. For example, $x^2 - 2$ is represented as `((- 2) 0 1)`. 
For uniqueness, the coefficients should be coprime integers and the leading coefficient should be positive. - +The first objective allows checking syntactically which values are equal in a model. The human readability means that a human can get an understanding of the value of the number just by reading the value. We propose two ways to write values of algebraic numbers that corresponding to one objective each `root-of-with-ordering` and `root-of-with-interval`. In both case the polynomial with integer coefficient is represented by the list of coefficients in ascending order, i.e., from the coefficient of the constant term to the leading coefficient. For example, $x^2 - 2$ is represented as `((- 2) 0 1)`. For uniqueness, the coefficients should be coprime integers and the leading coefficient should be positive. -* `(root-of-with-ordering (coeffs p_0 p_1 ... p_n) i)` represents the `i`-th root ordered with multiplicity from the smallest to greatest of the polynomial `(p_0 p_1 ... p_n)`. Here, `i` is a numeral (non-negative integer) and is `0` for the smallest root of the polynomial. The polynomial must be the unique reduced minimal polynomial, in particular, it must only have simple roots. -* `(root-of-with-interval (coeffs p_0 p_1 ... p_n) min max)` represents the unique root between `min` and `max` of the polynomial `(p_0 p_1 ... p_n)`. `i` is an integer. `min` and `max` are rational model values, e.g. `(0.0 (/ 1.0 2.0))` for the interval `[0, .5]` or `((- 1.0) (/ (- 1.0) 2.0))` for the interval [-1, -.5]. +- `(root-of-with-ordering (coeffs p_0 p_1 ... p_n) i)` represents the `i`-th root ordered with multiplicity from the smallest to greatest of the polynomial `(p_0 p_1 ... p_n)`. Here, `i` is a numeral (non-negative integer) and is `0` for the smallest root of the polynomial. The polynomial must be the unique reduced minimal polynomial, in particular, it must only have simple roots. +- `(root-of-with-interval (coeffs p_0 p_1 ... p_n) min max)` represents the unique root between `min` and `max` of the polynomial `(p_0 p_1 ... p_n)`. `i` is an integer. `min` and `max` are rational model values, e.g. `(0.0 (/ 1.0 2.0))` for the interval `[0, .5]` or `((- 1.0) (/ (- 1.0) 2.0))` for the interval [-1, -.5]. The constraint on the uniqueness of the polynomial in `root-of-with-ordering` could perhaps be removed. (EDIT: - * a previous version did not require `coeffs`, but it is necessary in order to be an SMTLIB term. - * a previous version used `(min max)` instead of `min max`, but it is necessary in order to be an SMTLIB term. - * Because of a typo, the model validator will accept this year both `root-of-with-ordering` and `root-of-with-order` and both `root-of-with-interval` and `root-of-with-enclosure`. -) + +- a previous version did not require `coeffs`, but it is necessary in order to be an SMTLIB term. +- a previous version used `(min max)` instead of `min max`, but it is necessary in order to be an SMTLIB term. +- Because of a typo, the model validator will accept this year both `root-of-with-ordering` and `root-of-with-order` and both `root-of-with-interval` and `root-of-with-enclosure`. 
+ ) ## Array values Models for arrays should be represented using the `store` and `const` functions: - ```smt2 - (define-fun b () (Array Int Real) - (store (store ((as const (Array Int Real)) 2.0) 0 1.0) 1 3.0)) - ``` + +```smt2 +(define-fun b () (Array Int Real) + (store (store ((as const (Array Int Real)) 2.0) 0 1.0) 1 3.0)) +``` There were two alternative suggestions but we rejected them for various reasons at least for this years conference. We will still outline them here to explain our reasoning. The alternatives are: + 1. By using an additional uninterpreted function: ```smt2 (define-fun b () (Array Int Real) @@ -131,7 +134,7 @@ There were two alternative suggestions but we rejected them for various reasons (define-fun b () (Array Int Real) (store (store (as @array0 (Array Int Real)) 0 1.0) 1 3.0)) ``` - Note that here the value is not uniquely defined. For our purpose, + Note that here the value is not uniquely defined. For our purpose, the array values must explicitly use store to define values for all indices that are read by any `select` in the benchmark. Different model values for arrays are considered @@ -139,7 +142,7 @@ There were two alternative suggestions but we rejected them for various reasons The const/store representation gives directly a constant term (that use the symbol `const` that is not defined by the SMTLIB format) when the first alternative requires to define an additional function. - The first alternative is more general +The first alternative is more general and can handle models for problems with quantifiers. Since SMT-LIB 3 should introduce anonymous functions we could backport the feature for the definition of model of arrays with the addition of the function `as-array` to convert from a function `(-> A B)` to an array `(Array A B)`. We would have @@ -157,7 +160,7 @@ the model validator to evaluate array equality (extensionality). So in order to keep the validator simple for quantifier free formulas, we require the first way of defining array models in quantifier free logic. The `const` function is only visible in the array models and not part of the -theory. A benchmark problem must not contain the `const` function in an +theory. A benchmark problem must not contain the `const` function in an assertion. To sum up, in quantifier free logic, models for arrays should be represented using `store` and `const` function. diff --git a/web/content/news/2023-11-12-test.md b/web/content/news/2023-11-12-test.md index cc0e5087..82d28d3d 100644 --- a/web/content/news/2023-11-12-test.md +++ b/web/content/news/2023-11-12-test.md @@ -13,8 +13,7 @@ Please let us know as soon as possible if you are considering submitting benchmarks, even if the material is not quite ready. We will work in close cooperation with the SMT-LIB maintainers to integrate such benchmarks into SMT-LIB. The deadline for submission -of new benchmarks to be used in the 2023 competition is March 31, -2023. +of new benchmarks to be used in the 2023 competition is March 31, 2023. 
If you have large complex benchmarks that are important to you and unsolved within some reasonable time limit, we are especially @@ -36,6 +35,6 @@ Sincerely, The organizing team -* François Bobot (chair), CEA List, France -* Martin Bromberger, MPI for Informatics, Germany -* Jochen Hoenicke, Certora, Israel +- François Bobot (chair), CEA List, France +- Martin Bromberger, MPI for Informatics, Germany +- Jochen Hoenicke, Certora, Israel diff --git a/web/content/publications/2005-design-and-results-of-the-1st-SMT-COMP.md b/web/content/publications/2005-design-and-results-of-the-1st-SMT-COMP.md index 24864355..d945e577 100644 --- a/web/content/publications/2005-design-and-results-of-the-1st-SMT-COMP.md +++ b/web/content/publications/2005-design-and-results-of-the-1st-SMT-COMP.md @@ -3,6 +3,7 @@ layout: default title: Design and Results of the 1st Satisfiability Modulo Theories Competition (SMT-COMP 2005). authors: Clark Barrett, Leonardo de Moura, and Aaron Stump --- + ```bibtex @article{BdMS05, author = {Clark Barrett and Leonardo de Moura and Aaron Stump}, diff --git a/web/content/publications/2006-design-and-results-of-the-2nd-SMT-COMP.md b/web/content/publications/2006-design-and-results-of-the-2nd-SMT-COMP.md index f56748ce..adb5aca9 100644 --- a/web/content/publications/2006-design-and-results-of-the-2nd-SMT-COMP.md +++ b/web/content/publications/2006-design-and-results-of-the-2nd-SMT-COMP.md @@ -3,6 +3,7 @@ layout: default title: Design and Results of the 2nd Annual Satisfiability Modulo Theories Competition (SMT-COMP 2006) authors: Clark Barrett, Leonardo de Moura, and Aaron Stump --- + ```bibtex @article{BdMS07, author = {Clark Barrett and Leonardo de Moura and Aaron Stump}, diff --git a/web/content/publications/2007-design-and-results-of-the-3rd-SMT-COMP.md b/web/content/publications/2007-design-and-results-of-the-3rd-SMT-COMP.md index 1b662e7e..d6becb74 100644 --- a/web/content/publications/2007-design-and-results-of-the-3rd-SMT-COMP.md +++ b/web/content/publications/2007-design-and-results-of-the-3rd-SMT-COMP.md @@ -4,6 +4,7 @@ title: Design and Results of the 3rd Annual Satisfiability Modulo Theories Compe authors: Clark Barrett, Morgan Deters, Albert Oliveras, and Aaron Stump web: https://dx.doi.org/10.1142/S0218213008004060 --- + ```bibtex @article{BDOS08, author = {Clark Barrett and Morgan Deters and Albert Oliveras and Aaron Stump}, diff --git a/web/content/publications/2008-design-and-results-of-the-4th-SMT-COMP.md b/web/content/publications/2008-design-and-results-of-the-4th-SMT-COMP.md index 902fbc42..e58c952a 100644 --- a/web/content/publications/2008-design-and-results-of-the-4th-SMT-COMP.md +++ b/web/content/publications/2008-design-and-results-of-the-4th-SMT-COMP.md @@ -4,6 +4,7 @@ title: Design and Results of the 4th Annual Satisfiability Modulo Theories Compe authors: Clark Barrett, Morgan Deters, Albert Oliveras, and Aaron Stump web: https://nyuscholars.nyu.edu/en/publications/design-and-results-of-the-4th-annual-satisfiability-modulo-theori --- + ```bibtex @techreport{BDOS10, author = {Clark Barrett and Morgan Deters and Albert Oliveras and Aaron Stump}, diff --git a/web/content/publications/2010-6-years-of-smt-comp.md b/web/content/publications/2010-6-years-of-smt-comp.md index b221c00a..2f520a09 100644 --- a/web/content/publications/2010-6-years-of-smt-comp.md +++ b/web/content/publications/2010-6-years-of-smt-comp.md @@ -3,6 +3,7 @@ title: 6 Years of SMT-COMP authors: Clark Barrett, Morgan Deters, Leonardo de Moura, Albert Oliveras, and Aaron Stump web: 
https://dx.doi.org/10.1007/s10817-012-9246-5 --- + ```bibtex @article{BDdMOS13, author = {Barrett, Clark and Deters, Morgan and de Moura, Leonardo and Oliveras, Albert and Stump, Aaron}, diff --git a/web/content/publications/2012-the-2012-competition.md b/web/content/publications/2012-the-2012-competition.md index 5e9d6980..d333cca9 100644 --- a/web/content/publications/2012-the-2012-competition.md +++ b/web/content/publications/2012-the-2012-competition.md @@ -4,6 +4,7 @@ title: The 2012 SMT Competition authors: David R. Cok, Alberto Griggio, Roberto Bruttomesso, and Morgan Deters web: https://doi.org/10.29007/gj66 --- + ```bibtex @misc{CGBD12, author = {David R. Cok and Alberto Griggio and Roberto Bruttomesso and Morgan Deters}, diff --git a/web/content/publications/2013-the-2013-evaluation.md b/web/content/publications/2013-the-2013-evaluation.md index 388e10c6..62acd5e7 100644 --- a/web/content/publications/2013-the-2013-evaluation.md +++ b/web/content/publications/2013-the-2013-evaluation.md @@ -4,6 +4,7 @@ title: The 2013 Evaluation of SMT-COMP and SMT-LIB authors: David R. Cok, Aaron Stump, and Tjark Weber web: https://dx.doi.org/10.1007/s10817-015-9328-2 --- + ```bibtex @article{CSW15, author = {David R. Cok and Aaron Stump and Tjark Weber}, diff --git a/web/content/publications/2014-the-2014-smt-competition.md b/web/content/publications/2014-the-2014-smt-competition.md index 9c30565e..e7022597 100644 --- a/web/content/publications/2014-the-2014-smt-competition.md +++ b/web/content/publications/2014-the-2014-smt-competition.md @@ -4,6 +4,7 @@ title: The 2014 SMT Competition authors: David R. Cok, David Déharbe, and Tjark Weber web: https://doi.org/10.3233/sat190109 --- + ```bibtex @article{CDW14, author = {Cok, David R. and David D{\'{e}}harbe and Tjark Weber}, diff --git a/web/content/publications/2018-the-smt-competition-2015-2018.md b/web/content/publications/2018-the-smt-competition-2015-2018.md index 186aff62..7d71294f 100644 --- a/web/content/publications/2018-the-smt-competition-2015-2018.md +++ b/web/content/publications/2018-the-smt-competition-2015-2018.md @@ -4,6 +4,7 @@ title: The SMT Competition 2015–2018 authors: Tjark Weber, Sylvain Conchon, David Déharbe, Matthias Heizmann, Aina Niemetz, and Giles Reger web: https://doi.org/10.3233/SAT190123 --- + ```bibtex @article{WCDHNR19, author = {Tjark Weber and diff --git a/web/content/solver_submission/index.md b/web/content/solver_submission/index.md new file mode 100644 index 00000000..f4d10daa --- /dev/null +++ b/web/content/solver_submission/index.md @@ -0,0 +1,13 @@ ++++ +title = 'Solver Submission' +date = 2024-02-16T22:38:03+01:00 +draft = false ++++ + +## Benchmark submission + +The solvers are submitted via pull requests to the SMT-COMP repository. To +submit your solver, please follow the +[instructions](https://github.com/SMT-COMP/smt-comp.github.io/tree/new_submission/submissions). +The detailed description of the fields in the solver JSON file is available in +the [schema](schema.html). diff --git a/web/content/solver_submission/schema.html b/web/content/solver_submission/schema.html new file mode 100644 index 00000000..6ff79141 --- /dev/null +++ b/web/content/solver_submission/schema.html @@ -0,0 +1,7 @@ + Submission

    Submission

    Type: object
    No Additional Properties

    Name

    Type: string

    Contributors

    Type: array

    Must contain a minimum of 1 item

    No Additional Items

    Each item of this array must be:

    Type: object

    Contributors to the development of the solver. If only the name is
    provided, it can be given directly.

    No Additional Properties
    Examples:

    "Jane Smith"

    {
        "name": "Jane Smith",
        "website": "http://jane.smith.name"
    }

    Name

    Type: string

    Website

    Default: null

    Type: string, Format: uri

    Must be at least 1 character long

    Must be at most 2083 characters long

    Contacts

    Type: array

    Must contain a minimum of 1 item

    No Additional Items

    Each item of this array must be:

    Type: object

    Name and valid email: "name <email>"


    Example:

    "Jane Smith <jane.smith@edu.world>"

    Name

    Type: string

    Email

    Type: string

    Default: null

    Archive

    Type: object

    Url

    Type: string, Format: uri

    Must be at least 1 character long

    Must be at most 2083 characters long

    Default: null

    Type: object
    No Additional Properties

    Type: null
    Type: null

    Default: null

    Command

    Type: object
    No Additional Properties

    Binary

    Type: string

    Arguments

    Type: array of string Default: []
    No Additional Items

    Each item of this array must be:

    Type: string

    Compa Starexec

    Type: boolean Default: false
    Type: null
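To make the archive and command sections above concrete, here is a fragment in the shape used by tests/test1.json from this patch. The URL, hash, and command are placeholders, not a real solver; note that tests/test1.json gives the command in list form, while the Binary/Arguments fields above describe the object form.

```python
# Placeholder values mirroring tests/test1.json; not a real solver archive.
fragment = {
    "archive": {
        "url": "http://example.com/solver.tar.gz",
        "h": {"sha256": "012345"},
    },
    # tests/test1.json uses the list form of command.
    "command": ["foo", "--super-fast"],
}
```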

    Website

    Type: string, Format: uri

    Must be at least 1 character long

    Must be at most 2083 characters long

    System Description

    Type: string, Format: uri

    Must be at least 1 character long

    Must be at most 2083 characters long

    Solver Type

    Type: enum (of string)

    Must be one of:

    • "wrapped"
    • "derived"
    • "Standalone"

    Participations

    Type: array
    No Additional Items

    Each item of this array must be:

    Type: object
    No Additional Properties

    Tracks

    Type: array
    No Additional Items

    Each item of this array must be:

    Type: enum (of string)

    Must be one of:

    • "UnsatCore"
    • "SingleQuery"
    • "ProofExhibition"
    • "ModelValidation"
    • "Incremental"
    • "Cloud"
    • "Parallel"

    Logics

    Type: array Default: []
    No Additional Items

    Each item of this array must be:

    Type: enum (of string)

    Must be one of:

    • "ABV"
    • "ABVFP"
    • "ABVFPLRA"
    • "ALIA"
    • "ANIA"
    • "AUFBV"
    • "AUFBVDTLIA"
    • "AUFBVDTNIA"
    • "AUFBVDTNIRA"
    • "AUFBVFP"
    • "AUFDTLIA"
    • "AUFDTLIRA"
    • "AUFDTNIRA"
    • "AUFFPDTNIRA"
    • "AUFLIA"
    • "AUFLIRA"
    • "AUFNIA"
    • "AUFNIRA"
    • "BV"
    • "BVFP"
    • "BVFPLRA"
    • "FP"
    • "FPLRA"
    • "LIA"
    • "LRA"
    • "NIA"
    • "NRA"
    • "QF_ABV"
    • "QF_ABVFP"
    • "QF_ABVFPLRA"
    • "QF_ALIA"
    • "QF_ANIA"
    • "QF_AUFBV"
    • "QF_AUFBVFP"
    • "QF_AUFBVLIA"
    • "QF_AUFBVNIA"
    • "QF_AUFLIA"
    • "QF_AUFNIA"
    • "QF_AX"
    • "QF_BV"
    • "QF_BVFP"
    • "QF_BVFPLRA"
    • "QF_DT"
    • "QF_FP"
    • "QF_FPLRA"
    • "QF_IDL"
    • "QF_LIA"
    • "QF_LIRA"
    • "QF_LRA"
    • "QF_NIA"
    • "QF_NIRA"
    • "QF_NRA"
    • "QF_RDL"
    • "QF_S"
    • "QF_SLIA"
    • "QF_SNIA"
    • "QF_UF"
    • "QF_UFBV"
    • "QF_UFBVDT"
    • "QF_UFBVLIA"
    • "QF_UFDT"
    • "QF_UFDTLIA"
    • "QF_UFDTLIRA"
    • "QF_UFDTNIA"
    • "QF_UFFP"
    • "QF_UFFPDTNIRA"
    • "QF_UFIDL"
    • "QF_UFLIA"
    • "QF_UFLRA"
    • "QF_UFNIA"
    • "QF_UFNRA"
    • "UF"
    • "UFBV"
    • "UFBVDT"
    • "UFBVFP"
    • "UFBVLIA"
    • "UFDT"
    • "UFDTLIA"
    • "UFDTLIRA"
    • "UFDTNIA"
    • "UFDTNIRA"
    • "UFFPDTNIRA"
    • "UFIDL"
    • "UFLIA"
    • "UFLRA"
    • "UFNIA"
    • "UFNRA"

    Divisions

    Type: array Default: []
    No Additional Items

    Each item of this array must be:

    Type: enum (of string)

    Must be one of:

    • "Arith"
    • "Bitvec"
    • "Equality"
    • "Equality+LinearArith"
    • "Equality+MachineArith"
    • "Equality+NonLinearArith"
    • "FPArith"
    • "QF_ADT+BitVec"
    • "QF_ADT+LinArith"
    • "QF_Bitvec"
    • "QF_Datatypes"
    • "QF_Equality"
    • "QF_Equality+Bitvec"
    • "QF_Equality+Bitvec+Arith"
    • "QF_Equality+LinearArith"
    • "QF_Equality+NonLinearArith"
    • "QF_FPArith"
    • "QF_LinearIntArith"
    • "QF_LinearRealArith"
    • "QF_NonLinearIntArith"
    • "QF_NonLinearRealArith"
    • "QF_Strings"

    Default: null

    Type: object
    No Additional Properties

    Arguments

    Type: array of string Default: []
    No Additional Items

    Each item of this array must be:

    Type: string

    Compa Starexec

    Type: boolean Default: false

    Experimental

    Type: boolean Default: false
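Putting the schema together, the sketch below writes a minimal submission and validates it with smtcomp.submission.read, the entry point exercised by tests/test_validate.py above. Every value is a placeholder mirroring tests/test1.json; the contact email in particular is invented for illustration.

```python
import json
from pathlib import Path

from smtcomp.submission import read  # as imported in tests/test_validate.py

# Minimal submission mirroring tests/test1.json; every value is a placeholder.
submission = {
    "name": "mysolver",
    "contributors": ["foo", {"name": "bar", "website": "http://bar.com/"}],
    "contacts": ["foo bar <foo.bar@example.com>"],  # illustrative email
    "archive": {
        "url": "http://example.com/solver.tar.gz",
        "h": {"sha256": "012345"},
    },
    "website": "http://example.com/",
    "system_description": "http://example.com/system.pdf",
    "command": ["foo", "--super-fast"],
    "solver_type": "Standalone",
    "participations": [
        {"tracks": ["SingleQuery"], "divisions": ["QF_Strings"]},
        {"tracks": ["SingleQuery"], "logics": "QF_.*"},
    ],
}

path = Path("mysolver.json")
path.write_text(json.dumps(submission, indent=2))
read(path)  # parses and validates; tests/test_bad.json-style files are rejected
```

The test suite drives the same check through the CLI's `validate` subcommand, i.e. `runner.invoke(app, ["validate", name])` in tests/test_validate.py.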
    \ No newline at end of file diff --git a/web/hugo.toml b/web/hugo.toml index 41472338..8f3996ec 100644 --- a/web/hugo.toml +++ b/web/hugo.toml @@ -45,7 +45,12 @@ theme = 'smtcomp' URL = '/rules.pdf' weight = 10 +[[menu.year]] + name = 'Solver Submission' + pageRef = 'solver_submission' + weight = 20 + [[menu.year]] name = 'Model Validation Track' pageRef = 'model' - weight = 20 \ No newline at end of file + weight = 30 \ No newline at end of file diff --git a/web/redirects/benchmark_submission.html b/web/redirects/benchmark_submission.html index 3a7e4ea1..616e36fa 100644 --- a/web/redirects/benchmark_submission.html +++ b/web/redirects/benchmark_submission.html @@ -1 +1 @@ - \ No newline at end of file + diff --git a/web/redirects/index.html b/web/redirects/index.html index df4aa2ef..d1441d76 100644 --- a/web/redirects/index.html +++ b/web/redirects/index.html @@ -1 +1 @@ - + diff --git a/web/redirects/introduction.html b/web/redirects/introduction.html index 3c365130..2ceebf05 100644 --- a/web/redirects/introduction.html +++ b/web/redirects/introduction.html @@ -1 +1 @@ - \ No newline at end of file + diff --git a/web/redirects/papers.html b/web/redirects/papers.html index bb14ab90..dd35c864 100644 --- a/web/redirects/papers.html +++ b/web/redirects/papers.html @@ -1 +1 @@ - \ No newline at end of file + diff --git a/web/redirects/previous.html b/web/redirects/previous.html index 261ad04f..a6c602b2 100644 --- a/web/redirects/previous.html +++ b/web/redirects/previous.html @@ -1 +1 @@ - \ No newline at end of file + diff --git a/web/redirects/publications.html b/web/redirects/publications.html index f2647f8c..8e14d2b6 100644 --- a/web/redirects/publications.html +++ b/web/redirects/publications.html @@ -1 +1 @@ - \ No newline at end of file + diff --git a/web/resources/_gen/assets/scss/css/main.scss_2bceb0f0a412527b058634b2e208c849.json b/web/resources/_gen/assets/scss/css/main.scss_2bceb0f0a412527b058634b2e208c849.json index c5b6932c..8852b179 100644 --- a/web/resources/_gen/assets/scss/css/main.scss_2bceb0f0a412527b058634b2e208c849.json +++ b/web/resources/_gen/assets/scss/css/main.scss_2bceb0f0a412527b058634b2e208c849.json @@ -1 +1,7 @@ -{"Target":"css/main.8ea09380f449117c7c3ad4bf4c1d61b383e5460340799fd4b0fa30a854dc6543.css","MediaType":"text/css","Data":{"Integrity":"sha256-jqCTgPRJEXx8OtS/TB1hs4PlRgNAeZ/UsPowqFTcZUM="}} \ No newline at end of file +{ + "Target": "css/main.8ea09380f449117c7c3ad4bf4c1d61b383e5460340799fd4b0fa30a854dc6543.css", + "MediaType": "text/css", + "Data": { + "Integrity": "sha256-jqCTgPRJEXx8OtS/TB1hs4PlRgNAeZ/UsPowqFTcZUM=" + } +} diff --git a/web/themes/smtcomp/assets/css/main.css b/web/themes/smtcomp/assets/css/main.css index 4f151afb..d77cdc62 100644 --- a/web/themes/smtcomp/assets/css/main.css +++ b/web/themes/smtcomp/assets/css/main.css @@ -1,18 +1,86 @@ /* Jekyll theme dinky start */ -html, body, div, span, applet, object, iframe, -h1, h2, h3, h4, h5, h6, p, blockquote, pre, -a, abbr, acronym, address, big, cite, code, -del, dfn, em, img, ins, kbd, q, s, samp, -small, strike, strong, sub, sup, tt, var, -b, u, i, center, -dl, dt, dd, ol, ul, li, -fieldset, form, label, legend, -table, caption, tbody, tfoot, thead, tr, th, td, -article, aside, canvas, details, embed, -figure, figcaption, footer, header, hgroup, -menu, nav, output, ruby, section, summary, -time, mark, audio, video { +html, +body, +div, +span, +applet, +object, +iframe, +h1, +h2, +h3, +h4, +h5, +h6, +p, +blockquote, +pre, +a, +abbr, +acronym, +address, +big, +cite, +code, +del, +dfn, 
+em, +img, +ins, +kbd, +q, +s, +samp, +small, +strike, +strong, +sub, +sup, +tt, +var, +b, +u, +i, +center, +dl, +dt, +dd, +ol, +ul, +li, +fieldset, +form, +label, +legend, +table, +caption, +tbody, +tfoot, +thead, +tr, +th, +td, +article, +aside, +canvas, +details, +embed, +figure, +figcaption, +footer, +header, +hgroup, +menu, +nav, +output, +ruby, +section, +summary, +time, +mark, +audio, +video { margin: 0; padding: 0; border: 0; @@ -20,81 +88,92 @@ time, mark, audio, video { vertical-align: baseline; } - /* Base text styles */ body { - padding:10px 50px 0 0; - font-family:"Helvetica Neue", Helvetica, Arial, sans-serif; - font-size: 14px; - color: #232323; - background-color: #FBFAF7; - margin: 0; - line-height: 1.8em; - -webkit-font-smoothing: antialiased; - + padding: 10px 50px 0 0; + font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; + font-size: 14px; + color: #232323; + background-color: #fbfaf7; + margin: 0; + line-height: 1.8em; + -webkit-font-smoothing: antialiased; } -h1, h2, h3, h4, h5, h6 { - color:#232323; - margin:36px 0 10px; +h1, +h2, +h3, +h4, +h5, +h6 { + color: #232323; + margin: 36px 0 10px; } -p, ul, ol, table, dl { - margin:0 0 22px; +p, +ul, +ol, +table, +dl { + margin: 0 0 22px; } -h1, h2, h3 { - font-family: Arvo, Monaco, serif; - line-height:1.3; - font-weight: normal; +h1, +h2, +h3 { + font-family: Arvo, Monaco, serif; + line-height: 1.3; + font-weight: normal; } -h1,h2, h3 { - display: block; - border-bottom: 1px solid #ccc; - padding-bottom: 5px; +h1, +h2, +h3 { + display: block; + border-bottom: 1px solid #ccc; + padding-bottom: 5px; } h1 { - font-size: 30px; + font-size: 30px; } h2 { - font-size: 24px; + font-size: 24px; } h3 { - font-size: 18px; + font-size: 18px; } -h4, h5, h6 { - font-family: Arvo, Monaco, serif; - font-weight: 700; +h4, +h5, +h6 { + font-family: Arvo, Monaco, serif; + font-weight: 700; } a { - color:#C30000; - font-weight:200; - text-decoration:none; + color: #c30000; + font-weight: 200; + text-decoration: none; } - - a:hover { - text-decoration: underline; + text-decoration: underline; } a small { - font-size: 12px; + font-size: 12px; } em { - font-style: italic; + font-style: italic; } strong { - font-weight:700; + font-weight: 700; } ul { @@ -115,8 +194,11 @@ blockquote { font-style: italic; } -dl, dt, dd, dl p { - color: #444; +dl, +dt, +dd, +dl p { + color: #444; } dl dt { @@ -134,10 +216,10 @@ dl p { } hr { - border:0; - background:#ccc; - height:1px; - margin:0 0 24px; + border: 0; + background: #ccc; + height: 1px; + margin: 0 0 24px; } /* Images */ @@ -151,7 +233,8 @@ img { border: 1px solid #ccc; } -p img, .emoji { +p img, +.emoji { display: inline; margin: 0; padding: 0; @@ -162,305 +245,319 @@ p img, .emoji { /* Code blocks */ -code, pre { - font-family: Monaco, "Bitstream Vera Sans Mono", "Lucida Console", Terminal, monospace; - color:#000; - font-size:14px; +code, +pre { + font-family: Monaco, "Bitstream Vera Sans Mono", "Lucida Console", Terminal, + monospace; + color: #000; + font-size: 14px; } pre { - padding: 4px 12px; - background: #FDFEFB; - border-radius:4px; - border:1px solid #D7D8C8; + padding: 4px 12px; + background: #fdfefb; + border-radius: 4px; + border: 1px solid #d7d8c8; overflow: auto; overflow-y: hidden; - margin-bottom: 32px; + margin-bottom: 32px; } - /* Tables */ table { - width:100%; + width: 100%; } table { border: 1px solid #ccc; margin-bottom: 32px; text-align: left; - } +} th { - font-family: 'Arvo', Helvetica, Arial, sans-serif; - font-size: 18px; - font-weight: normal; + font-family: 
"Arvo", Helvetica, Arial, sans-serif; + font-size: 18px; + font-weight: normal; padding: 10px; background: #232323; - color: #FDFEFB; - } + color: #fdfefb; +} td { padding: 10px; - background: #ccc; - } - + background: #ccc; +} /* Wrapper */ .wrapper { - width:960px; + width: 960px; } - /* Header */ header { - background-color: #171717; - color: #FDFDFB; - width:170px; - float:left; - position:fixed; - border: 1px solid #000; - -webkit-border-top-right-radius: 4px; - -webkit-border-bottom-right-radius: 4px; - -moz-border-radius-topright: 4px; - -moz-border-radius-bottomright: 4px; - border-top-right-radius: 4px; - border-bottom-right-radius: 4px; - padding: 34px 25px 22px 50px; - margin: 30px 25px 0 0; - -webkit-font-smoothing: antialiased; + background-color: #171717; + color: #fdfdfb; + width: 170px; + float: left; + position: fixed; + border: 1px solid #000; + -webkit-border-top-right-radius: 4px; + -webkit-border-bottom-right-radius: 4px; + -moz-border-radius-topright: 4px; + -moz-border-radius-bottomright: 4px; + border-top-right-radius: 4px; + border-bottom-right-radius: 4px; + padding: 34px 25px 22px 50px; + margin: 30px 25px 0 0; + -webkit-font-smoothing: antialiased; } p.header { - font-size: 16px; + font-size: 16px; } h1.header { - font-family: Arvo, sans-serif; - font-size: 30px; - font-weight: 300; - line-height: 1.3em; - border-bottom: none; - margin-top: 0; + font-family: Arvo, sans-serif; + font-size: 30px; + font-weight: 300; + line-height: 1.3em; + border-bottom: none; + margin-top: 0; } - -h1.header, a.header, a.name, header a{ - color: #fff; +h1.header, +a.header, +a.name, +header a { + color: #fff; } a.hl { - font-weight: bold; + font-weight: bold; } a.header { - text-decoration: underline; + text-decoration: underline; } a.name { - white-space: nowrap; + white-space: nowrap; } header ul { - list-style:none; - padding:0; + list-style: none; + padding: 0; } header li { - list-style-type: none; - width:132px; - height:15px; - margin-bottom: 12px; - line-height: 1em; - padding: 6px 6px 6px 7px; - - background: #AF0011; - background: -moz-linear-gradient(top, #AF0011 0%, #820011 100%); - background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,#f8f8f8), color-stop(100%,#dddddd)); - background: -webkit-linear-gradient(top, #AF0011 0%,#820011 100%); - background: -o-linear-gradient(top, #AF0011 0%,#820011 100%); - background: -ms-linear-gradient(top, #AF0011 0%,#820011 100%); - background: linear-gradient(to top, #AF0011 0%,#820011 100%); - - border-radius:4px; - border:1px solid #0D0D0D; - - -webkit-box-shadow: inset 0px 1px 1px 0 rgba(233,2,38, 1); - box-shadow: inset 0px 1px 1px 0 rgba(233,2,38, 1); - + list-style-type: none; + width: 132px; + height: 15px; + margin-bottom: 12px; + line-height: 1em; + padding: 6px 6px 6px 7px; + + background: #af0011; + background: -moz-linear-gradient(top, #af0011 0%, #820011 100%); + background: -webkit-gradient( + linear, + left top, + left bottom, + color-stop(0%, #f8f8f8), + color-stop(100%, #dddddd) + ); + background: -webkit-linear-gradient(top, #af0011 0%, #820011 100%); + background: -o-linear-gradient(top, #af0011 0%, #820011 100%); + background: -ms-linear-gradient(top, #af0011 0%, #820011 100%); + background: linear-gradient(to top, #af0011 0%, #820011 100%); + + border-radius: 4px; + border: 1px solid #0d0d0d; + + -webkit-box-shadow: inset 0px 1px 1px 0 rgba(233, 2, 38, 1); + box-shadow: inset 0px 1px 1px 0 rgba(233, 2, 38, 1); } header li:hover { - background: #C3001D; - background: -moz-linear-gradient(top, 
#C3001D 0%, #950119 100%); - background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,#f8f8f8), color-stop(100%,#dddddd)); - background: -webkit-linear-gradient(top, #C3001D 0%,#950119 100%); - background: -o-linear-gradient(top, #C3001D 0%,#950119 100%); - background: -ms-linear-gradient(top, #C3001D 0%,#950119 100%); - background: linear-gradient(to top, #C3001D 0%,#950119 100%); + background: #c3001d; + background: -moz-linear-gradient(top, #c3001d 0%, #950119 100%); + background: -webkit-gradient( + linear, + left top, + left bottom, + color-stop(0%, #f8f8f8), + color-stop(100%, #dddddd) + ); + background: -webkit-linear-gradient(top, #c3001d 0%, #950119 100%); + background: -o-linear-gradient(top, #c3001d 0%, #950119 100%); + background: -ms-linear-gradient(top, #c3001d 0%, #950119 100%); + background: linear-gradient(to top, #c3001d 0%, #950119 100%); } a.buttons { - -webkit-font-smoothing: antialiased; - background: url(../images/arrow-down.png) no-repeat; - font-weight: normal; - text-shadow: rgba(0, 0, 0, 0.4) 0 -1px 0; - padding: 2px 2px 2px 22px; - height: 30px; + -webkit-font-smoothing: antialiased; + background: url(../images/arrow-down.png) no-repeat; + font-weight: normal; + text-shadow: rgba(0, 0, 0, 0.4) 0 -1px 0; + padding: 2px 2px 2px 22px; + height: 30px; } a.github { - background: url(../images/octocat-small.png) no-repeat 1px; + background: url(../images/octocat-small.png) no-repeat 1px; } a.buttons:hover { - color: #fff; - text-decoration: none; + color: #fff; + text-decoration: none; } - /* Section - for main page content */ section { - width:650px; - float:right; - padding-bottom:50px; + width: 650px; + float: right; + padding-bottom: 50px; } - /* Footer */ footer { - width:170px; - float:left; - position:fixed; - bottom:10px; - padding-left: 50px; + width: 170px; + float: left; + position: fixed; + bottom: 10px; + padding-left: 50px; } @media print, screen and (max-width: 960px) { - div.wrapper { - width:auto; - margin:0; + width: auto; + margin: 0; } - header, section, footer { - float:none; - position:static; - width:auto; + header, + section, + footer { + float: none; + position: static; + width: auto; } - footer { - border-top: 1px solid #ccc; - margin:0 84px 0 50px; - padding:0; - } + footer { + border-top: 1px solid #ccc; + margin: 0 84px 0 50px; + padding: 0; + } header { - padding-right:320px; + padding-right: 320px; } section { - padding:20px 84px 20px 50px; - margin:0 0 20px; + padding: 20px 84px 20px 50px; + margin: 0 0 20px; } header a small { - display:inline; + display: inline; } header ul { - position:absolute; - right:130px; - top:84px; + position: absolute; + right: 130px; + top: 84px; } } @media print, screen and (max-width: 720px) { body { - word-wrap:break-word; + word-wrap: break-word; } header { - padding:10px 20px 0; - margin-right: 0; + padding: 10px 20px 0; + margin-right: 0; } - section { - padding:10px 0 10px 20px; - margin:0 0 30px; + section { + padding: 10px 0 10px 20px; + margin: 0 0 30px; } - footer { - margin: 0 0 0 30px; - } + footer { + margin: 0 0 0 30px; + } - header ul, header p.view { - position:static; + header ul, + header p.view { + position: static; } } @media print, screen and (max-width: 480px) { - header ul li.download { - display:none; + display: none; } - footer { - margin: 0 0 0 20px; - } - - footer a{ - display:block; - } + footer { + margin: 0 0 0 20px; + } + footer a { + display: block; + } } @media print { body { - padding:0.4in; - font-size:12pt; - color:#444; + padding: 0.4in; + font-size: 
12pt; + color: #444; } } /* Jekyll theme dinky end */ -h4 { margin-top: 20px; } +h4 { + margin-top: 20px; +} -table.sorted thead tr th, table.sorted tfoot tr th { - /* background-color: #e6EEEE; */ - padding: 4px; - padding-right: 40px; - border: 1px solid black; - vertical-align: middle; +table.sorted thead tr th, +table.sorted tfoot tr th { + /* background-color: #e6EEEE; */ + padding: 4px; + padding-right: 40px; + border: 1px solid black; + vertical-align: middle; } table.sorted thead tr .header { - background-image: url(https://smt-comp.github.io/img/bg.gif); - background-repeat: no-repeat; - background-position: center right; - cursor: pointer; + background-image: url(https://smt-comp.github.io/img/bg.gif); + background-repeat: no-repeat; + background-position: center right; + cursor: pointer; } table.sorted tbody td { - color: #3D3D3D; - padding: 4px; - background-color: #FFF; - vertical-align: top; + color: #3d3d3d; + padding: 4px; + background-color: #fff; + vertical-align: top; } table.sorted tbody tr.odd td { - background-color:#F0F0F6; + background-color: #f0f0f6; } table.sorted thead tr .headerSortUp { - background-image: url(https://smt-comp.github.io/img/asc.gif); + background-image: url(https://smt-comp.github.io/img/asc.gif); } table.sorted thead tr .headerSortDown { - background-image: url(https://smt-comp.github.io/img/desc.gif); + background-image: url(https://smt-comp.github.io/img/desc.gif); } -table.sorted thead tr .headerSortDown, table.sorted thead tr .headerSortUp { -background-color: #d5d9d9; -color: #000; +table.sorted thead tr .headerSortDown, +table.sorted thead tr .headerSortUp { + background-color: #d5d9d9; + color: #000; } table { @@ -563,7 +660,8 @@ span.non-competing { } } -sub, sup { +sub, +sup { font-size: 75%; line-height: 0; position: relative; @@ -588,4 +686,4 @@ ul li ul { ul li p { margin: 0; -} \ No newline at end of file +} diff --git a/web/themes/smtcomp/assets/js/main.js b/web/themes/smtcomp/assets/js/main.js index e2aac527..2f45f613 100644 --- a/web/themes/smtcomp/assets/js/main.js +++ b/web/themes/smtcomp/assets/js/main.js @@ -1 +1 @@ -console.log('This site was generated by Hugo.'); +console.log("This site was generated by Hugo."); diff --git a/web/themes/smtcomp/layouts/_default/result.html b/web/themes/smtcomp/layouts/_default/result.html index d9b8bbf5..165fcb09 100644 --- a/web/themes/smtcomp/layouts/_default/result.html +++ b/web/themes/smtcomp/layouts/_default/result.html @@ -17,8 +17,8 @@

    {{ .Params.division }} ({{ .Params.track }})

    Results were generated on {{ .Params.resultdate }}

    - Benchmarks: {{ .Params.n_benchmarks }}
    - Time Limit: {{ .Params.time_limit }} seconds
    + Benchmarks: {{ .Params.n_benchmarks }}
    + Time Limit: {{ .Params.time_limit }} seconds
    Memory Limit: {{ .Params.mem_limit }} GB

    diff --git a/web/themes/smtcomp/layouts/partials/menu.html b/web/themes/smtcomp/layouts/partials/menu.html index a3752af9..9e58f69f 100644 --- a/web/themes/smtcomp/layouts/partials/menu.html +++ b/web/themes/smtcomp/layouts/partials/menu.html @@ -23,8 +23,7 @@ - {{- end }} -

    +{{- end }} {{- define "partials/inline/menu/walk.html" }} {{- $page := .page }} diff --git a/web/themes/smtcomp/theme.toml b/web/themes/smtcomp/theme.toml index 34ecfc19..eb4a1292 100644 --- a/web/themes/smtcomp/theme.toml +++ b/web/themes/smtcomp/theme.toml @@ -2,4 +2,4 @@ name = 'SMT-COMP' description = 'SMT-COMP Theme' # The home page of the theme, where the source can be found -homepage = 'https://github.com/SMT-COMP/smt-comp.github.io' \ No newline at end of file +homepage = 'https://github.com/SMT-COMP/smt-comp.github.io'