diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index dc8c8f2e1..af80dfb72 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -11,6 +11,7 @@ RUN apt-get install -y --no-install-recommends bundler RUN apt-get install -y --no-install-recommends nodejs RUN apt-get install -y --no-install-recommends npm RUN apt-get install -y --no-install-recommends ditaa +RUN apt-get install -y --no-install-recommends libyaml-dev RUN apt-get clean autoclean RUN apt-get autoremove -y RUN rm -rf /var/lib/{apt, dpkg, cache, log} diff --git a/.github/workflows/regress.yml b/.github/workflows/regress.yml index 742d918b6..0cf5179ba 100644 --- a/.github/workflows/regress.yml +++ b/.github/workflows/regress.yml @@ -5,8 +5,124 @@ on: - main workflow_dispatch: jobs: - regress: + regress-smoke: runs-on: ubuntu-latest + steps: + - name: Clone Github Repo Action + uses: actions/checkout@v4 + - name: Setup apptainer + uses: eWaterCycle/setup-apptainer@v2.0.0 + - name: Get container from cache + id: cache-sif + uses: actions/cache@v3 + with: + path: .singularity/image.sif + key: ${{ hashFiles('container.def', 'bin/.container-tag') }} + - name: Get gems and node files from cache + id: cache-bundle-npm + uses: actions/cache@v3 + with: + path: | + .home/.gems + node_modules + key: ${{ hashFiles('Gemfile.lock') }}-${{ hashFiles('package-lock.json') }} + - if: ${{ steps.cache-sif.outputs.cache-hit != 'true' }} + name: Build container + run: ./bin/build_container + - name: Setup project + run: ./bin/setup + - name: Run smoke + run: ./do smoke + regress-gen-isa-manual: + runs-on: ubuntu-latest + needs: regress-smoke + env: + MANUAL_NAME: isa + VERSIONS: all + steps: + - name: Clone Github Repo Action + uses: actions/checkout@v4 + - name: Setup apptainer + uses: eWaterCycle/setup-apptainer@v2.0.0 + - name: Get container from cache + id: cache-sif + uses: actions/cache@v3 + with: + path: .singularity/image.sif + key: ${{ hashFiles('container.def', 'bin/.container-tag') }} + - name: Get gems and node files from cache + id: cache-bundle-npm + uses: actions/cache@v3 + with: + path: | + .home/.gems + node_modules + key: ${{ hashFiles('Gemfile.lock') }}-${{ hashFiles('package-lock.json') }} + - if: ${{ steps.cache-sif.outputs.cache-hit != 'true' }} + name: Build container + run: ./bin/build_container + - name: Generate HTML ISA manual + run: ./do gen:html_manual + regress-gen-ext-pdf: + runs-on: ubuntu-latest + needs: regress-smoke + env: + EXT: B + VERSION: latest + steps: + - name: Clone Github Repo Action + uses: actions/checkout@v4 + - name: Setup apptainer + uses: eWaterCycle/setup-apptainer@v2.0.0 + - name: Get container from cache + id: cache-sif + uses: actions/cache@v3 + with: + path: .singularity/image.sif + key: ${{ hashFiles('container.def', 'bin/.container-tag') }} + - name: Get gems and node files from cache + id: cache-bundle-npm + uses: actions/cache@v3 + with: + path: | + .home/.gems + node_modules + key: ${{ hashFiles('Gemfile.lock') }}-${{ hashFiles('package-lock.json') }} + - if: ${{ steps.cache-sif.outputs.cache-hit != 'true' }} + name: Build container + run: ./bin/build_container + - name: Generate extension PDF + run: ./do gen:ext_pdf + regress-gen-certificate: + runs-on: ubuntu-latest + needs: regress-smoke + steps: + - name: Clone Github Repo Action + uses: actions/checkout@v4 + - name: Setup apptainer + uses: eWaterCycle/setup-apptainer@v2.0.0 + - name: Get container from cache + id: cache-sif + uses: actions/cache@v3 + with: + path: .singularity/image.sif + key: ${{ 
hashFiles('container.def', 'bin/.container-tag') }} + - name: Get gems and node files from cache + id: cache-bundle-npm + uses: actions/cache@v3 + with: + path: | + .home/.gems + node_modules + key: ${{ hashFiles('Gemfile.lock') }}-${{ hashFiles('package-lock.json') }} + - if: ${{ steps.cache-sif.outputs.cache-hit != 'true' }} + name: Build container + run: ./bin/build_container + - name: Generate extension PDF + run: ./do gen:cert_model_pdf[MockCertificateModel] + regress-gen-profile: + runs-on: ubuntu-latest + needs: regress-smoke steps: - name: Clone Github Repo Action uses: actions/checkout@v4 @@ -29,7 +145,5 @@ jobs: - if: ${{ steps.cache-sif.outputs.cache-hit != 'true' }} name: Build container run: ./bin/build_container - - name: Setup project - run: ./bin/setup - - name: Run regression - run: ./do regress + - name: Generate extension PDF + run: ./do gen:profile[MockProfileRelease] \ No newline at end of file diff --git a/.rubocop.yml b/.rubocop.yml index ca2d2a8fd..3d27e703b 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -40,6 +40,10 @@ Metrics/AbcSize: Metrics/CyclomaticComplexity: Enabled: false +Metrics/ParameterLists: + Enabled: true + CountKeywordArgs: false + Metrics/PerceivedComplexity: Enabled: false diff --git a/Gemfile b/Gemfile index 8ad685012..2e3212fa3 100644 --- a/Gemfile +++ b/Gemfile @@ -4,24 +4,26 @@ ruby "3.2.3" source "https://rubygems.org" +gem "activesupport" gem "asciidoctor-diagram", "~> 2.2" gem "asciidoctor-pdf" gem "base64" gem "bigdecimal" gem "json_schemer", "~> 1.0" gem "minitest" -gem "ruby-progressbar", "~> 1.13" gem "pygments.rb" gem "rake", "~> 13.0" gem "rouge" +gem "ruby-progressbar", "~> 1.13" gem "treetop", "1.6.12" gem "ttfunk", "1.7" # needed to avoid having asciidoctor-pdf dependencies pulling in a buggy version of ttunk (1.8) gem "webrick" gem "yard" group :development do - gem "solargraph" - gem 'rubocop-minitest' - gem 'ruby-prof' + gem "debug" + gem "rubocop-minitest" + gem "ruby-prof" gem "ruby-prof-flamegraph" + gem "solargraph" end diff --git a/Gemfile.lock b/Gemfile.lock index f2664dee0..a7a369830 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -2,6 +2,19 @@ GEM remote: https://rubygems.org/ specs: Ascii85 (1.1.1) + activesupport (8.0.0) + base64 + benchmark (>= 0.3) + bigdecimal + concurrent-ruby (~> 1.0, >= 1.3.1) + connection_pool (>= 2.2.5) + drb + i18n (>= 1.6, < 2) + logger (>= 1.4.2) + minitest (>= 5.1) + securerandom (>= 0.3) + tzinfo (~> 2.0, >= 2.0.5) + uri (>= 0.13.1) addressable (2.8.7) public_suffix (>= 2.0.2, < 7.0) afm (0.2.2) @@ -31,12 +44,23 @@ GEM benchmark (0.3.0) bigdecimal (3.1.8) concurrent-ruby (1.3.3) + connection_pool (2.4.1) css_parser (1.17.1) addressable + debug (1.9.2) + irb (~> 1.10) + reline (>= 0.3.8) diff-lcs (1.5.1) + drb (2.2.1) e2mmap (0.1.0) hana (1.3.7) hashery (2.1.2) + i18n (1.14.6) + concurrent-ruby (~> 1.0) + io-console (0.7.2) + irb (1.14.1) + rdoc (>= 4.0.0) + reline (>= 0.4.2) jaro_winkler (1.6.0) json (2.7.2) json_schemer (1.0.3) @@ -48,6 +72,7 @@ GEM kramdown-parser-gfm (1.1.0) kramdown (~> 2.0) language_server-protocol (3.17.0.3) + logger (1.6.2) matrix (0.4.2) minitest (5.24.1) nokogiri (1.16.5-x86_64-linux) @@ -79,13 +104,19 @@ GEM prawn-templates (0.1.2) pdf-reader (~> 2.0) prawn (~> 2.2) + psych (5.2.0) + stringio public_suffix (6.0.0) pygments.rb (3.0.0) racc (1.8.0) rainbow (3.1.1) rake (13.2.1) rbs (2.8.4) + rdoc (6.8.1) + psych (>= 4.0.0) regexp_parser (2.9.2) + reline (0.5.11) + io-console (~> 0.5) reverse_markdown (2.1.1) nokogiri rexml (3.2.8) @@ -112,6 +143,7 @@ GEM ruby-prof 
(~> 0.13) ruby-progressbar (1.13.0) ruby-rc4 (0.1.5) + securerandom (0.4.0) simpleidn (0.2.3) solargraph (0.50.0) backport (~> 1.2) @@ -129,13 +161,17 @@ GEM thor (~> 1.0) tilt (~> 2.0) yard (~> 0.9, >= 0.9.24) + stringio (3.1.2) strscan (3.1.0) thor (1.3.1) tilt (2.3.0) treetop (1.6.12) polyglot (~> 0.3) ttfunk (1.7.0) + tzinfo (2.0.6) + concurrent-ruby (~> 1.0) unicode-display_width (2.5.0) + uri (1.0.2) webrick (1.8.1) yard (0.9.36) @@ -143,10 +179,12 @@ PLATFORMS x86_64-linux-gnu DEPENDENCIES + activesupport asciidoctor-diagram (~> 2.2) asciidoctor-pdf base64 bigdecimal + debug json_schemer (~> 1.0) minitest pygments.rb diff --git a/Rakefile b/Rakefile index b1b3feb28..441aad601 100644 --- a/Rakefile +++ b/Rakefile @@ -9,7 +9,8 @@ require "ruby-progressbar" require "yard" require "minitest/test_task" -require_relative $root / "lib" / "validate" +require_relative $root / "lib" / "specification" +require_relative $root / "lib" / "resolver" directory "#{$root}/.stamps" @@ -19,12 +20,18 @@ end directory "#{$root}/.stamps" -file "#{$root}/.stamps/dev_gems" => "#{$root}/.stamps" do - Dir.chdir($root) do - sh "bundle config set --local with development" - sh "bundle install" - FileUtils.touch "#{$root}/.stamps/dev_gems" - end +def arch_def_for(config_name) + Rake::Task["#{$root}/.stamps/resolve-#{config_name}.stamp"].invoke + + @arch_defs ||= {} + return @arch_defs[config_name] if @arch_defs.key?(config_name) + + @arch_defs[config_name] = + ArchDef.new( + config_name, + $root / "gen" / "resolved_arch" / config_name, + overlay_path: $root / "cfgs" / config_name / "arch_overlay" + ) end namespace :gen do @@ -35,6 +42,28 @@ namespace :gen do sh "bundle exec yard doc --yardopts idl.yardopts" end end + + desc "Resolve the standard in arch/, and write it to resolved_arch/" + task "resolved_arch" do + sh "#{$root}/.home/.venv/bin/python3 lib/yaml_resolver.py resolve arch resolved_arch" + end +end + +# rule to generate standard for any configurations with an overlay +rule %r{#{$root}/.stamps/resolve-.+\.stamp} => proc { |tname| + cfg_name = File.basename(tname, ".stamp").sub("resolve-", "") + arch_files = Dir.glob("#{$root}/arch/**/*.yaml") + overlay_files = Dir.glob("#{$root}/cfgs/#{cfg_name}/arch_overlay/**/*.yaml") + [ + "#{$root}/.stamps", + "#{$root}/lib/yaml_resolver.py" + ] + arch_files + overlay_files +} do |t| + cfg_name = File.basename(t.name, ".stamp").sub("resolve-", "") + sh "#{$root}/.home/.venv/bin/python3 lib/yaml_resolver.py merge arch cfgs/#{cfg_name}/arch_overlay gen/arch/#{cfg_name}" + sh "#{$root}/.home/.venv/bin/python3 lib/yaml_resolver.py resolve gen/arch/#{cfg_name} gen/resolved_arch/#{cfg_name}" + + FileUtils.touch t.name end namespace :serve do @@ -86,29 +115,23 @@ namespace :validate do end puts "All instruction encodings pass basic sanity tests" end - task schema: "gen:arch" do - validator = Validator.instance + task schema: "gen:resolved_arch" do puts "Checking arch files against schema.." 
- arch_files = Dir.glob("#{$root}/arch/**/*.yaml") - progressbar = ProgressBar.create(total: arch_files.size) - arch_files.each do |f| - progressbar.increment - validator.validate(f) - end - puts "All files validate against their schema" + Specification.new("#{$root}/resolved_arch").validate(show_progress: true) + puts "All files validate against their schema" end - task idl: ["gen:arch", "#{$root}/.stamps/arch-gen-_32.stamp", "#{$root}/.stamps/arch-gen-_64.stamp"] do + task idl: ["gen:resolved_arch", "#{$root}/.stamps/resolve-rv32.stamp", "#{$root}/.stamps/resolve-rv64.stamp"] do print "Parsing IDL code for RV32..." - arch_def_32 = arch_def_for("_32") + arch_def32 = arch_def_for("rv32") puts "done" - arch_def_32.type_check + arch_def32.type_check print "Parsing IDL code for RV64..." - arch_def_64 = arch_def_for("_64") + arch_def64 = arch_def_for("rv64") puts "done" - arch_def_64.type_check + arch_def64.type_check puts "All IDL passed type checking" end @@ -130,7 +153,6 @@ private :insert_warning "#{$root}/arch/csr/Zihpm/mhpmcounterN.layout", __FILE__ ] do |t| - puts "Generating #{Pathname.new(t.name).relative_path_from($root)}" erb = ERB.new(File.read($root / "arch/csr/Zihpm/mhpmcounterN.layout"), trim_mode: "-") erb.filename = "#{$root}/arch/csr/Zihpm/mhpmcounterN.layout" File.write(t.name, insert_warning(erb.result(binding), t.prerequisites.first)) @@ -139,7 +161,6 @@ private :insert_warning "#{$root}/arch/csr/Zihpm/mhpmcounterNh.layout", __FILE__ ] do |t| - puts "Generating #{Pathname.new(t.name).relative_path_from($root)}" erb = ERB.new(File.read($root / "arch/csr/Zihpm/mhpmcounterNh.layout"), trim_mode: "-") erb.filename = "#{$root}/arch/csr/Zihpm/mhpmcounterNh.layout" File.write(t.name, insert_warning(erb.result(binding), t.prerequisites.first)) @@ -148,7 +169,6 @@ private :insert_warning "#{$root}/arch/csr/Zihpm/mhpmeventN.layout", __FILE__ ] do |t| - puts "Generating #{Pathname.new(t.name).relative_path_from($root)}" erb = ERB.new(File.read($root / "arch/csr/Zihpm/mhpmeventN.layout"), trim_mode: "-") erb.filename = "#{$root}/arch/csr/Zihpm/mhpmeventN.layout" File.write(t.name, insert_warning(erb.result(binding), t.prerequisites.first)) @@ -157,7 +177,6 @@ private :insert_warning "#{$root}/arch/csr/Zihpm/mhpmeventNh.layout", __FILE__ ] do |t| - puts "Generating #{Pathname.new(t.name).relative_path_from($root)}" erb = ERB.new(File.read($root / "arch/csr/Zihpm/mhpmeventNh.layout"), trim_mode: "-") erb.filename = "#{$root}/arch/csr/Zihpm/mhpmeventNh.layout" File.write(t.name, insert_warning(erb.result(binding), t.prerequisites.first)) @@ -166,7 +185,6 @@ private :insert_warning "#{$root}/arch/csr/Zihpm/hpmcounterN.layout", __FILE__ ] do |t| - puts "Generating #{Pathname.new(t.name).relative_path_from($root)}" erb = ERB.new(File.read($root / "arch/csr/Zihpm/hpmcounterN.layout"), trim_mode: "-") erb.filename = "#{$root}/arch/csr/Zihpm/hpmcounterN.layout" File.write(t.name, insert_warning(erb.result(binding), t.prerequisites.first)) @@ -175,7 +193,6 @@ private :insert_warning "#{$root}/arch/csr/Zihpm/hpmcounterNh.layout", __FILE__ ] do |t| - puts "Generating #{Pathname.new(t.name).relative_path_from($root)}" erb = ERB.new(File.read($root / "arch/csr/Zihpm/hpmcounterNh.layout"), trim_mode: "-") erb.filename = "#{$root}/arch/csr/Zihpm/hpmcounterNh.layout" File.write(t.name, insert_warning(erb.result(binding), t.prerequisites.first)) @@ -187,7 +204,6 @@ end "#{$root}/arch/csr/I/pmpaddrN.layout", __FILE__ ] do |t| - puts "Generating #{Pathname.new(t.name).relative_path_from($root)}" erb = 
ERB.new(File.read($root / "arch/csr/I/pmpaddrN.layout"), trim_mode: "-") erb.filename = "#{$root}/arch/csr/I/pmpaddrN.layout" File.write(t.name, insert_warning(erb.result(binding), t.prerequisites.first)) @@ -199,7 +215,6 @@ end "#{$root}/arch/csr/I/pmpcfgN.layout", __FILE__ ] do |t| - puts "Generating #{Pathname.new(t.name).relative_path_from($root)}" erb = ERB.new(File.read($root / "arch/csr/I/pmpcfgN.layout"), trim_mode: "-") erb.filename = "#{$root}/arch/csr/I/pmpcfgN.layout" File.write(t.name, insert_warning(erb.result(binding), t.prerequisites.first)) @@ -210,7 +225,6 @@ file "#{$root}/arch/csr/I/mcounteren.yaml" => [ "#{$root}/arch/csr/I/mcounteren.layout", __FILE__ ] do |t| - puts "Generating #{Pathname.new(t.name).relative_path_from($root)}" erb = ERB.new(File.read($root / "arch/csr/I/mcounteren.layout"), trim_mode: "-") erb.filename = "#{$root}/arch/csr/I/mcounteren.layout" File.write(t.name, insert_warning(erb.result(binding), t.prerequisites.first)) @@ -220,7 +234,6 @@ file "#{$root}/arch/csr/S/scounteren.yaml" => [ "#{$root}/arch/csr/S/scounteren.layout", __FILE__ ] do |t| - puts "Generating #{Pathname.new(t.name).relative_path_from($root)}" erb = ERB.new(File.read($root / "arch/csr/S/scounteren.layout"), trim_mode: "-") erb.filename = "#{$root}/arch/csr/S/scounteren.layout" File.write(t.name, insert_warning(erb.result(binding), t.prerequisites.first)) @@ -230,7 +243,6 @@ file "#{$root}/arch/csr/H/hcounteren.yaml" => [ "#{$root}/arch/csr/H/hcounteren.layout", __FILE__ ] do |t| - puts "Generating #{Pathname.new(t.name).relative_path_from($root)}" erb = ERB.new(File.read($root / "arch/csr/H/hcounteren.layout"), trim_mode: "-") erb.filename = "#{$root}/arch/csr/H/hcounteren.layout" File.write(t.name, insert_warning(erb.result(binding), t.prerequisites.first)) @@ -240,7 +252,6 @@ file "#{$root}/arch/csr/Zicntr/mcountinhibit.yaml" => [ "#{$root}/arch/csr/Zicntr/mcountinhibit.layout", __FILE__ ] do |t| - puts "Generating #{Pathname.new(t.name).relative_path_from($root)}" erb = ERB.new(File.read($root / "arch/csr/Zicntr/mcountinhibit.layout"), trim_mode: "-") erb.filename = "#{$root}/arch/csr/Zicntr/mcountinhibit.layout" File.write(t.name, insert_warning(erb.result(binding), t.prerequisites.first)) diff --git a/arch/certificate_model/MC100.yaml b/arch/certificate_model/MC100.yaml index a182de120..a6f178bd2 100644 --- a/arch/certificate_model/MC100.yaml +++ b/arch/certificate_model/MC100.yaml @@ -16,7 +16,7 @@ base: 32 revision_history: - revision: "0.7.0" - date: 2024-07-29 + date: "2024-07-29" changes: - First version after moving non-microcontroller content in this document to a new document called "RISC-V CRDs (Certification Requirement Documents)" @@ -27,7 +27,7 @@ revision_history: - Added requirements for WFI instruction - Added requirements related to msip memory-mapped register - revision: "0.6.0" - date: 2024-07-11 + date: "2024-07-11" changes: - Supporting multiple MC versions to support customers wanting to certify existing microcontrollers not using the latest version of ratified standards. - Changed versioning scheme to use major.minor.patch instead of 3-digit major & minor. 
@@ -42,28 +42,28 @@ revision_history: - Added more options for interrupts - Moved non-microcontroller content in this document to a new document called "RISC-V Certification Plans" - revision: "0.5.0" - date: 2024-06-03 + date: "2024-06-03" changes: - Renamed to "RISC-V Microcontroller Certification Plan" based on Jason's recommendation - Added mvendorid, marchid, mimpid, and mhardid read-only priv CSRs because Allen pointed out these are mandatory in M-mode v1.13 (probably older versions too, haven't looked yet). - Added table showing mapping of MC versions to associated RISC-V specifications - revision: "0.4.0" - date: 2024-06-03 + date: "2024-06-03" changes: - Added M-mode instruction requirements - Made Zicntr MANDATORY due to very low cost for implementations to support (in the spirit of minimizing options). - Removed OPT-CNTR-PREC since minstret and mcycle must be a full 64 bits to be standard-compliant. - revision: "0.3.0" - date: 2024-05-25 + date: "2024-05-25" changes: - Includes Zicntr as OPTIONAL and then has only 32-bit counters for instret and cycle. - revision: "0.2.0" - date: 2024-05-20 + date: "2024-05-20" changes: - Very early draft - revision: "0.1.0" - date: 2024-05-16 + date: "2024-05-16" changes: - Initial version diff --git a/arch/certificate_model/MockCertificateModel.yaml b/arch/certificate_model/MockCertificateModel.yaml index 2f19b00b5..afe4e7bc0 100644 --- a/arch/certificate_model/MockCertificateModel.yaml +++ b/arch/certificate_model/MockCertificateModel.yaml @@ -17,11 +17,11 @@ versions: revision_history: - revision: "0.1.0" - date: 2024-10-04 + date: "2024-10-04" changes: - Created to test CRDs - revision: "0.2.0" - date: 2024-10-05 + date: "2024-10-05" changes: - Also created to test CRDs @@ -40,8 +40,7 @@ debug_manual_revision: "0.13.2" # XXX - Remove version information since specifying priv/unpriv ISA manual should imply this. extensions: $inherits: - - "profile_release/MockProfileRelease.yaml#/MockProfileRelease/profiles/MP-U-64/extensions" - - "profile_release/MockProfileRelease.yaml#/MockProfileRelease/profiles/MP-S-64/extensions" + - "profile/MP-S-64.yaml#/extensions" I: note: Just added this note to I extension MockExt: diff --git a/arch/csr/vstvec.yaml b/arch/csr/vstvec.yaml index 1d77e20b4..601f296c7 100644 --- a/arch/csr/vstvec.yaml +++ b/arch/csr/vstvec.yaml @@ -39,8 +39,10 @@ fields: When Vectored, asynchronous interrupts jump to (`vstvec.BASE` << 2 + `vscause.CAUSE`*4) while synchronous exceptions continue to jump to (`vstvec.BASE` << 2). type: RW-R sw_write(csr_value): | - if (csr_value.MODE == 0 || csr_value.MODE == 1) { - return csr_value.MODE; + if (VSTVEC_MODE_DIRECT && csr_value.MODE == 0) { + return 0; + } else if (VSTVEC_MODE_VECTORED && csr_value.MODE == 1) { + return 1; } else { return UNDEFINED_LEGAL_DETERMINISTIC; } diff --git a/arch/ext/C.yaml b/arch/ext/C.yaml index 5321d7d90..7f0fa175a 100644 --- a/arch/ext/C.yaml +++ b/arch/ext/C.yaml @@ -12,7 +12,7 @@ doc_license: name: Creative Commons Attribution 4.0 International License url: https://creativecommons.org/licenses/by/4.0/ versions: -- version: "2.2.0" +- version: "2.0.0" state: ratified ratification_date: 2019-12 description: | diff --git a/arch/ext/H.yaml b/arch/ext/H.yaml index 0e8bb8bcd..58a8ba84f 100644 --- a/arch/ext/H.yaml +++ b/arch/ext/H.yaml @@ -311,6 +311,14 @@ params: When false `vstval` is written with 0 when an `IllegalInstruction` exception occurs. 
schema: type: boolean + REPORT_GPA_IN_HTVAL_ON_GUEST_PAGE_FAULT: + description: | + When true, `htval` is written with the Guest Physical Address, shifted right by 2, that + caused a `GuestPageFault` exception. + + When false, `htval` is written with 0 when a `GuestPageFault` exception occurs. + schema: + type: boolean HCOUNTENABLE_EN: description: | Indicates which counters can delegated via `hcounteren` @@ -636,4 +644,18 @@ params: without raising a trap, in which case the EEI must provide a builtin. schema: type: boolean - default: true \ No newline at end of file + default: true + VSTVEC_MODE_DIRECT: + description: | + Whether or not `vstvec.MODE` supports Direct (0). + schema: + type: boolean + extra_validation: + assert VSTVEC_MODE_DIRECT || VSTVEC_MODE_VECTORED + VSTVEC_MODE_VECTORED: + description: | + Whether or not `vstvec.MODE` supports Vectored (1). + schema: + type: boolean + extra_validation: + assert VSTVEC_MODE_DIRECT || VSTVEC_MODE_VECTORED \ No newline at end of file diff --git a/arch/ext/Q.yaml b/arch/ext/Q.yaml new file mode 100644 index 000000000..d75e74dbc --- /dev/null +++ b/arch/ext/Q.yaml @@ -0,0 +1,20 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Q +long_name: Quad-Precision Floating-Point +description: | + 128-bit quad-precision binary floating-point instructions compliant with the IEEE 754-2008 + arithmetic standard. + `Q` depends on the double-precision floating-point extension `D`. + With `Q`, the floating-point registers are extended to hold either a single, double, or quad-precision + floating-point value (FLEN=128). + The NaN-boxing scheme is extended recursively to allow a single-precision value to be NaN-boxed inside + a double-precision value which is itself NaN-boxed inside a quad-precision value. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + requires: D diff --git a/arch/ext/Sdext.yaml b/arch/ext/Sdext.yaml new file mode 100644 index 000000000..ff03373ea --- /dev/null +++ b/arch/ext/Sdext.yaml @@ -0,0 +1,13 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Sdext +long_name: Debug +description: | + Hart-visible portion of the debug spec. +type: privileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Shgatpa.yaml b/arch/ext/Shgatpa.yaml new file mode 100644 index 000000000..6714fe537 --- /dev/null +++ b/arch/ext/Shgatpa.yaml @@ -0,0 +1,34 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Shgatpa +long_name: hgatp profile requirements +description: | + For each supported virtual memory scheme SvNN supported in + `satp`, the corresponding hgatp SvNNx4 mode must be supported. The + `hgatp` mode Bare must also be supported. + + [NOTE] + This extension was ratified with the RVA22 profiles. 
+type: privileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + param_constraints: + SV32X4_TRANSLATION: + extra_validation: | + (SV32X4_TRANSLATION && ext?(:Sv32)) || (!SV32X4_TRANSLATION && !ext?(:Sv32)) + SV39X4_TRANSLATION: + extra_validation: | + (SV39X4_TRANSLATION && ext?(:Sv39)) || (!SV39X4_TRANSLATION && !ext?(:Sv39)) + SV48X4_TRANSLATION: + extra_validation: | + (SV48X4_TRANSLATION && ext?(:Sv48)) || (!SV48X4_TRANSLATION && !ext?(:Sv48)) + SV57X4_TRANSLATION: + extra_validation: | + (SV57X4_TRANSLATION && ext?(:Sv57)) || (!SV57X4_TRANSLATION && !ext?(:Sv57)) + GSTAGE_MODE_BARE: + schema: + const: true diff --git a/arch/ext/Shtvala.yaml b/arch/ext/Shtvala.yaml new file mode 100644 index 000000000..a409bd24e --- /dev/null +++ b/arch/ext/Shtvala.yaml @@ -0,0 +1,25 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Shtvala +long_name: htval profile requirements +description: | + htval must be written with the faulting virtual address + for load, store, and instruction page-fault, access-fault, and + misaligned exceptions, and for breakpoint exceptions other than + those caused by execution of the `ebreak` or `c.ebreak` instructions. + For virtual-instruction and illegal-instruction exceptions, htval must be written with the + faulting instruction. + + [NOTE] + This extension was ratified with the RVA22 profiles. +type: privileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + param_constraints: + REPORT_GPA_IN_HTVAL_ON_GUEST_PAGE_FAULT: + schema: + const: true diff --git a/arch/ext/Shvstvala.yaml b/arch/ext/Shvstvala.yaml new file mode 100644 index 000000000..00efb7bbc --- /dev/null +++ b/arch/ext/Shvstvala.yaml @@ -0,0 +1,55 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Shvstvala +long_name: vstval profile requirements +description: | + vstval must be written with the faulting virtual address + for load, store, and instruction page-fault, access-fault, and + misaligned exceptions, and for breakpoint exceptions other than + those caused by execution of the `ebreak` or `c.ebreak` instructions. + For virtual-instruction and illegal-instruction exceptions, vstval must be written with the + faulting instruction. + + [NOTE] + This extension was ratified with the RVA22 profiles. 
+type: privileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + param_constraints: + REPORT_VA_IN_VSTVAL_ON_BREAKPOINT: + schema: + const: true + REPORT_VA_IN_VSTVAL_ON_LOAD_MISALIGNED: + schema: + const: true + REPORT_VA_IN_VSTVAL_ON_STORE_AMO_MISALIGNED: + schema: + const: true + REPORT_VA_IN_VSTVAL_ON_INSTRUCTION_MISALIGNED: + schema: + const: true + REPORT_VA_IN_VSTVAL_ON_LOAD_ACCESS_FAULT: + schema: + const: true + REPORT_VA_IN_VSTVAL_ON_STORE_AMO_ACCESS_FAULT: + schema: + const: true + REPORT_VA_IN_VSTVAL_ON_INSTRUCTION_ACCESS_FAULT: + schema: + const: true + REPORT_VA_IN_VSTVAL_ON_LOAD_PAGE_FAULT: + schema: + const: true + REPORT_VA_IN_VSTVAL_ON_STORE_AMO_PAGE_FAULT: + schema: + const: true + REPORT_VA_IN_VSTVAL_ON_INSTRUCTION_PAGE_FAULT: + schema: + const: true + REPORT_ENCODING_IN_VSTVAL_ON_ILLEGAL_INSTRUCTION: + schema: + const: true diff --git a/arch/ext/Shvstvecd.yaml b/arch/ext/Shvstvecd.yaml new file mode 100644 index 000000000..39483a42f --- /dev/null +++ b/arch/ext/Shvstvecd.yaml @@ -0,0 +1,22 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Shvstvecd +long_name: vstvec profile requirements +description: | + `vstvec.MODE` must be capable of holding the value 0 (Direct). + When `vstvec.MODE`=Direct, `vstvec.BASE` must be capable of holding + any valid four-byte-aligned address. + + [NOTE] + This extension was ratified with the RVA22 profiles. +type: privileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + param_constraints: + VSTVEC_MODE_DIRECT: + schema: + const: true diff --git a/arch/ext/Smdbltrp.yaml b/arch/ext/Smdbltrp.yaml new file mode 100644 index 000000000..40183276d --- /dev/null +++ b/arch/ext/Smdbltrp.yaml @@ -0,0 +1,22 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Smdbltrp +long_name: Double trap +description: | + The `Smdbltrp` extension addresses a double trap in M-mode. + When the `Smrnmi` extension is implemented, it enables invocation of the RNMI handler on a + double trap in M-mode to handle the critical error. + If the `Smrnmi` extension is not implemented or if a double trap occurs during the RNMI + handler’s execution, this extension helps transition the hart to a critical error state and + enables signaling the critical error to the platform. + + To improve error diagnosis and resolution, this extension supports debugging harts in a critical + error state. The extension introduces a mechanism to enter Debug Mode instead of asserting a + critical-error signal to the platform when the hart is in a critical error state. +type: privileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Smrnmi.yaml b/arch/ext/Smrnmi.yaml new file mode 100644 index 000000000..90802b790 --- /dev/null +++ b/arch/ext/Smrnmi.yaml @@ -0,0 +1,21 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Smrnmi +long_name: Resumable Non-Maskable Interrupts +description: | + The base machine-level architecture supports only unresumable non-maskable interrupts (UNMIs), + where the NMI jumps to a handler in machine mode, overwriting the current `mepc` and `mcause` + register values. + If the hart had been executing machine-mode code in a trap handler, the previous values in `mepc` + and `mcause` would not be recoverable and so execution is not generally resumable. 
+ + The `Smrnmi` extension adds support for resumable non-maskable interrupts (RNMIs) to RISC-V. + The extension adds four new CSRs (`mnepc`, `mncause`, `mnstatus`, and `mnscratch`) to hold the + interrupted state, and one new instruction, `mnret`, to resume from the RNMI handler. +type: privileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Ssstateen.yaml b/arch/ext/Ssstateen.yaml new file mode 100644 index 000000000..9ffd629b6 --- /dev/null +++ b/arch/ext/Ssstateen.yaml @@ -0,0 +1,28 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Ssstateen +long_name: Supervisor-mode view of the state-enable extension +description: | + Supervisor-mode view of the state-enable extension. The + supervisor-mode (`sstateen0-3`) and hypervisor-mode (`hstateen0-3`) + state-enable registers must be provided. + + NOTE: The Smstateen extension specification is an M-mode extension as + it includes M-mode features, but the supervisor-mode visible + components of the extension are named as the Ssstateen extension. Only + Ssstateen is mandated in the RVA22S64 profile when the hypervisor + extension is implemented. These registers are not mandated or + supported options without the hypervisor extension, as there are no + RVA22S64 supported options with relevant state to control in the + absence of the hypervisor extension. + + [NOTE] + This extension was ratified with the RVA22 profiles. +type: privileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + # TODO: add param_constraints \ No newline at end of file diff --git a/arch/ext/Ssu64xl.yaml b/arch/ext/Ssu64xl.yaml new file mode 100644 index 000000000..cf1697d13 --- /dev/null +++ b/arch/ext/Ssu64xl.yaml @@ -0,0 +1,20 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Ssu64xl +long_name: 64-bit UXLEN +description: | + `sstatus.UXL` must be capable of holding the value 2 (i.e., UXLEN=64 must be supported). + + [NOTE] + This extension is defined by RVA22. +type: privileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + param_constraints: + UXLEN: + schema: + enum: [64, 3264] diff --git a/arch/ext/Zabha.yaml b/arch/ext/Zabha.yaml new file mode 100644 index 000000000..e5c77df5b --- /dev/null +++ b/arch/ext/Zabha.yaml @@ -0,0 +1,14 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zabha +long_name: Byte and Halfword Atomic Memory Operations +description: | + Adds byte and halfword atomic memory operations to the RISC-V Unprivileged ISA. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + requires: Zaamo diff --git a/arch/ext/Zacas.yaml b/arch/ext/Zacas.yaml new file mode 100644 index 000000000..373ee388c --- /dev/null +++ b/arch/ext/Zacas.yaml @@ -0,0 +1,14 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zacas +long_name: Atomic Compare-and-Swap (CAS) Instructions +description: | + Adds Word/Doubleword/Quadword compare-and-swap instructions. 
+type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + requires: Zaamo diff --git a/arch/ext/Zalasr.yaml b/arch/ext/Zalasr.yaml new file mode 100644 index 000000000..ca1d410b4 --- /dev/null +++ b/arch/ext/Zalasr.yaml @@ -0,0 +1,13 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zalasr +long_name: Atomic, Load-Acquire Store-Release +description: | + load-acquire and store-release instructions. +type: unprivileged +versions: +- version: "0.3.5" + state: development + ratification_date: null diff --git a/arch/ext/Zawrs.yaml b/arch/ext/Zawrs.yaml new file mode 100644 index 000000000..aabc0ed46 --- /dev/null +++ b/arch/ext/Zawrs.yaml @@ -0,0 +1,22 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zawrs +long_name: Wait-on-Reservation-Set Instructions +description: | + The `Zawrs` extension defines a pair of instructions to be used in polling loops that allows a + core to enter a low-power state and wait on a store to a memory location. + Waiting for a memory location to be updated is a common pattern in many use cases such as: + + * Contenders for a lock waiting for the lock variable to be updated. + * Consumers waiting on the tail of an empty queue for the producer to queue work/data. + The producer may be code executing on a RISC-V hart, an accelerator device, an external I/O agent. + * Code waiting on a flag to be set in memory indicative of an event occurring. + For example, software on a RISC-V hart may wait on a "done" flag to be set in memory by an + accelerator device indicating completion of a job previously submitted to the device. +type: unprivileged +versions: +- version: "1.1.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zbkb.yaml b/arch/ext/Zbkb.yaml new file mode 100644 index 000000000..f9c0cb6f9 --- /dev/null +++ b/arch/ext/Zbkb.yaml @@ -0,0 +1,12 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zbkb +long_name: Bit-manipulation for Cryptography +description: This extension contains instructions essential for implementing common operations in cryptographic workloads. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zbkc.yaml b/arch/ext/Zbkc.yaml new file mode 100644 index 000000000..3280d7f3f --- /dev/null +++ b/arch/ext/Zbkc.yaml @@ -0,0 +1,17 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zbkc +long_name: Carry-less multiplication for Cryptography +description: | + Carry-less multiplication is the multiplication in the polynomial ring over GF(2). + This is a critical operation in some cryptographic workloads, particularly the AES-GCM + authenticated encryption scheme. + This extension provides only the instructions needed to efficiently implement the GHASH operation, + which is part of this workload. 
+type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zbkx.yaml b/arch/ext/Zbkx.yaml new file mode 100644 index 000000000..fa2bfa3bb --- /dev/null +++ b/arch/ext/Zbkx.yaml @@ -0,0 +1,18 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zbkx +long_name: Crossbar permutations +description: | + Adds instructions to implement a "lookup table" for 4 and 8 bit elements inside the general purpose + registers. + + These instructions are useful for expressing N-bit to N-bit boolean operations, and implementing + cryptographic code with secret dependent memory accesses (particularly SBoxes) such that the + execution latency does not depend on the (secret) data being operated on. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zca.yaml b/arch/ext/Zca.yaml new file mode 100644 index 000000000..32c8f3e97 --- /dev/null +++ b/arch/ext/Zca.yaml @@ -0,0 +1,43 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zca +long_name: C instructions excluding floating-point +description: | + The Zca extension is added as way to refer to instructions in the `C` extension that do not + include the floating-point loads and stores. + + Therefore it excludes all 16-bit floating point loads and stores: + `c.flw`, `c.flwsp`, `c.fsw`, `c.fswsp`, `c.fld`, `c.fldsp`, `c.fsd`, `c.fsdsp`. + + [NOTE] + The 'C' extension only includes `F`/`D` instructions when `D` and `F` are also specified. + +type: unprivileged +company: + name: RISC-V International + url: https://riscv.org +versions: +- version: "1.0.0" + state: ratified + ratification_date: 2023-04 + repositories: + - url: https://github.com/riscv/riscv-code-size-reduction + branch: main + contributors: + - name: Tariq Kurd + - name: Ibrahim Abu Kharmeh + - name: Torbjørn Viem Ness + - name: Matteo Perotti + - name: Nidal Faour + - name: Bill Traynor + - name: Rafael Sene + - name: Xinlong Wu + - name: sinan + - name: Jeremy Bennett + - name: Heda Chen + - name: Alasdair Armstrong + - name: Graeme Smecher + - name: Nicolas Brunie + - name: Jiawei \ No newline at end of file diff --git a/arch/ext/Zfa.yaml b/arch/ext/Zfa.yaml new file mode 100644 index 000000000..7296e8367 --- /dev/null +++ b/arch/ext/Zfa.yaml @@ -0,0 +1,19 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zfa +long_name: Extension for Additional Floating-Point Instructions +description: | + `Zfa` adds instructions for immediate loads, IEEE 754-2019 minimum and maximum operations, + round-to-integer operations, and quiet floating-point comparisons. + For RV32D, the `Zfa` extension also adds instructions to transfer double-precision floating-point + values to and from integer registers, and for RV64Q, it adds analogous instructions for + quad-precision floating-point values. + The `Zfa` extension depends on the `F` extension. 
+type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + requires: F diff --git a/arch/ext/Zfbfmin.yaml b/arch/ext/Zfbfmin.yaml new file mode 100644 index 000000000..480eda9b8 --- /dev/null +++ b/arch/ext/Zfbfmin.yaml @@ -0,0 +1,24 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zfbfmin +long_name: Scalar BF16 Converts +description: | + The minimal set of instructions needed to enable scalar support of the BF16 format. + It enables BF16 as an interchange format as it provides conversion between BF16 values and + FP32 values. + + This extension depends upon the single-precision floating-point extension `F`, + and the `flh`, `fsh`, `fmv.x.h`, and `fmv.h.x` instructions as defined in the `Zfh` extension. + + +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + requires: + allOf: + - name: F + - name: Zfh diff --git a/arch/ext/Zfh.yaml b/arch/ext/Zfh.yaml new file mode 100644 index 000000000..aec07b95e --- /dev/null +++ b/arch/ext/Zfh.yaml @@ -0,0 +1,18 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zfh +long_name: Half-precision floating point +description: | + 16-bit half-precision binary floating-point instructions compliant with the IEEE 754-2008 + arithmetic standard. + The `Zfh` extension depends on the single-precision floating-point extension, `F`. + The NaN-boxing scheme is extended to allow a half-precision value to be NaN-boxed inside a + single-precision value (which may be recursively NaN-boxed inside a double- or quad-precision + value when the D or Q extension is present). +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zicbom.yaml b/arch/ext/Zicbom.yaml index 23206267c..e8726aa54 100644 --- a/arch/ext/Zicbom.yaml +++ b/arch/ext/Zicbom.yaml @@ -7,7 +7,7 @@ long_name: Cache block management instructions description: Cache block management instructions type: unprivileged versions: -- version: "1.0.1-b34ea8a" +- version: "1.0.0" state: ratified ratification_date: 2022-05 params: diff --git a/arch/ext/Zicbop.yaml b/arch/ext/Zicbop.yaml index 3cd9c3d01..ed3351ddf 100644 --- a/arch/ext/Zicbop.yaml +++ b/arch/ext/Zicbop.yaml @@ -7,7 +7,7 @@ long_name: Cache block prefetch description: Cache block prefetch instruction type: unprivileged versions: -- version: 1.0.1-b34ea8a +- version: "1.0.0" state: ratified ratification_date: 2022-05 params: diff --git a/arch/ext/Zicboz.yaml b/arch/ext/Zicboz.yaml index 84bd7cf42..ac29c1252 100644 --- a/arch/ext/Zicboz.yaml +++ b/arch/ext/Zicboz.yaml @@ -7,7 +7,7 @@ long_name: Cache block zero instruction description: Cache block zero instruction type: unprivileged versions: -- version: 1.0.1-b34ea8a +- version: "1.0.0" state: ratified ratification_date: 2022-05 params: diff --git a/arch/ext/Zicond.yaml b/arch/ext/Zicond.yaml new file mode 100644 index 000000000..c7f6bf803 --- /dev/null +++ b/arch/ext/Zicond.yaml @@ -0,0 +1,21 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zicond +long_name: Integer Conditional Operations +description: | + The "Conditional" operations extension provides a simple solution that provides most of the + benefit and all of the flexibility one would desire to support conditional arithmetic and + conditional-select/move operations, while remaining true to the 
RISC-V design philosophy. + The instructions follow the format for R-type instructions with 3 operands (_i.e._, 2 source + operands and 1 destination operand). + + Using these instructions, branchless sequences can be implemented (typically in two-instruction + sequences) without the need for instruction fusion, special provisions during the decoding of + architectural instructions, or other microarchitectural provisions. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zimop.yaml b/arch/ext/Zimop.yaml new file mode 100644 index 000000000..28eff880f --- /dev/null +++ b/arch/ext/Zimop.yaml @@ -0,0 +1,16 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zimop +long_name: May-be Operations +description: | + Instructions that may be operations (MOPs). + MOPs are initially defined to simply write zero to x[rd], but are designed to be redefined by + later extensions to perform some other action. + The Zimop extension defines an encoding space for 40 MOPs. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zk.yaml b/arch/ext/Zk.yaml new file mode 100644 index 000000000..43ff1149e --- /dev/null +++ b/arch/ext/Zk.yaml @@ -0,0 +1,22 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zk +long_name: Standard Cryptography Extensions +description: | + This extension is shorthand for the following set of other extensions: + + * `Zkn` + * `Zkr` + * `Zkt` + +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + implies: + - ["Zkn", "1.0.0"] + - ["Zkr", "1.0.0"] + - ["Zkt", "1.0.0"] diff --git a/arch/ext/Zkn.yaml b/arch/ext/Zkn.yaml new file mode 100644 index 000000000..81d2f7e33 --- /dev/null +++ b/arch/ext/Zkn.yaml @@ -0,0 +1,28 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zkn +long_name: NIST Algorithm Suite +description: | + This extension is shorthand for the following set of other extensions: + + * `Zbkb` + * `Zbkc` + * `Zbkx` + * `Zkne` + * `Zknd` + * `Zknh` + +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + implies: + - ["Zbkb", "1.0.0"] + - ["Zbkc", "1.0.0"] + - ["Zbkx", "1.0.0"] + - ["Zkne", "1.0.0"] + - ["Zknd", "1.0.0"] + - ["Zknh", "1.0.0"] diff --git a/arch/ext/Zknd.yaml b/arch/ext/Zknd.yaml new file mode 100644 index 000000000..a7771b35c --- /dev/null +++ b/arch/ext/Zknd.yaml @@ -0,0 +1,13 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zknd +long_name: "NIST Suite: AES Decryption" +description: | + Instructions for accelerating the decryption and key-schedule functions of the AES block cipher. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zkne.yaml b/arch/ext/Zkne.yaml new file mode 100644 index 000000000..f8191073f --- /dev/null +++ b/arch/ext/Zkne.yaml @@ -0,0 +1,13 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zkne +long_name: "NIST Suite: AES Encryption" +description: | + Instructions for accelerating the encryption and key-schedule functions of the AES block cipher. 
+type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zknh.yaml b/arch/ext/Zknh.yaml new file mode 100644 index 000000000..33c648065 --- /dev/null +++ b/arch/ext/Zknh.yaml @@ -0,0 +1,13 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zknh +long_name: "NIST Suite: Hash Function Instructions" +description: | + Instructions for accelerating the SHA2 family of cryptographic hash functions. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zkr.yaml b/arch/ext/Zkr.yaml new file mode 100644 index 000000000..443685113 --- /dev/null +++ b/arch/ext/Zkr.yaml @@ -0,0 +1,14 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zkr +long_name: Entropy Source +description: | + Defines the `seed` CSR. + This CSR provides up to 16 physical entropy bits that can be used to seed cryptographic random bit generators. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zks.yaml b/arch/ext/Zks.yaml new file mode 100644 index 000000000..b9965092e --- /dev/null +++ b/arch/ext/Zks.yaml @@ -0,0 +1,26 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zks +long_name: ShangMi Algorithm Suite +description: | + This extension is shorthand for the following set of other extensions: + + * `Zbkb` + * `Zbkc` + * `Zbkx` + * `Zksed` + * `Zksh` + +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + implies: + - ["Zbkb", "1.0.0"] + - ["Zbkc", "1.0.0"] + - ["Zbkx", "1.0.0"] + - ["Zksed", "1.0.0"] + - ["Zksh", "1.0.0"] diff --git a/arch/ext/Zksed.yaml b/arch/ext/Zksed.yaml new file mode 100644 index 000000000..0abcf81b2 --- /dev/null +++ b/arch/ext/Zksed.yaml @@ -0,0 +1,14 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zksed +long_name: "ShangMi Suite: SM4 Block Cipher Instructions" +description: | + Instructions for accelerating the SM4 Block Cipher. Note that unlike AES, this cipher uses the + same core operation for encryption and decryption, hence there is only one extension for it. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zksh.yaml b/arch/ext/Zksh.yaml new file mode 100644 index 000000000..e42ce0b1c --- /dev/null +++ b/arch/ext/Zksh.yaml @@ -0,0 +1,13 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zksh +long_name: "ShangMi Suite: SM3 Hash Function Instructions" +description: | + Instructions for accelerating the SM3 hash function. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zvbb.yaml b/arch/ext/Zvbb.yaml new file mode 100644 index 000000000..c11cb18db --- /dev/null +++ b/arch/ext/Zvbb.yaml @@ -0,0 +1,14 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zvbb +long_name: Vector Basic Bit-manipulation +description: | + Vector basic bit-manipulation instructions. 
+type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + implies: [Zvkb, "1.0.0"] diff --git a/arch/ext/Zvbc.yaml b/arch/ext/Zvbc.yaml new file mode 100644 index 000000000..75d387c3f --- /dev/null +++ b/arch/ext/Zvbc.yaml @@ -0,0 +1,16 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zvbc +long_name: Vector Carryless Multiplication +description: | + General purpose carryless multiplication instructions which are commonly used in cryptography and + hashing (e.g., Elliptic curve cryptography, GHASH, CRC). + + These instructions are only defined for SEW=64. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zvfbfmin.yaml b/arch/ext/Zvfbfmin.yaml new file mode 100644 index 000000000..5476e509b --- /dev/null +++ b/arch/ext/Zvfbfmin.yaml @@ -0,0 +1,21 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zvfbfmin +long_name: Vector BF16 Converts +description: | + This extension provides the minimal set of instructions needed to enable vector support of the + BF16 format. + It enables BF16 as an interchange format as it provides conversion between BF16 values and FP32 values. + + This extension depends upon either the `V` extension or the `Zve32f` embedded vector extension. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + requires: + anyOf: + - V + - Zve32f diff --git a/arch/ext/Zvfbfwma.yaml b/arch/ext/Zvfbfwma.yaml new file mode 100644 index 000000000..855a6cfd2 --- /dev/null +++ b/arch/ext/Zvfbfwma.yaml @@ -0,0 +1,19 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zvfbfwma +long_name: Vector BF16 widening mul-add +description: | + This extension provides a vector widening BF16 mul-add instruction that accumulates into FP32. + + This extension depends upon the `Zvfbfmin` extension and the `Zfbfmin` extension. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + requires: + allOf: + - Zvfbfmin + - Zfbfmin diff --git a/arch/ext/Zvkb.yaml b/arch/ext/Zvkb.yaml new file mode 100644 index 000000000..b8439d443 --- /dev/null +++ b/arch/ext/Zvkb.yaml @@ -0,0 +1,13 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zvkb +long_name: Vector Cryptography Bit-manipulation +description: | + Vector bit-manipulation instructions that are essential for implementing common cryptographic workloads securely & efficiently. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zvkg.yaml b/arch/ext/Zvkg.yaml new file mode 100644 index 000000000..af2b49016 --- /dev/null +++ b/arch/ext/Zvkg.yaml @@ -0,0 +1,23 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zvkg +long_name: Vector GCM/GMAC +description: | + Instructions to enable the efficient implementation of GHASHH which is used in Galois/Counter Mode + (GCM) and Galois Message Authentication Code (GMAC). + + All of these instructions work on 128-bit element groups comprised of four 32-bit elements. + + To help avoid side-channel timing attacks, these instructions shall be implemented with data-independent timing. 
+ + The number of element groups to be processed is vl/EGS. + vl must be set to the number of SEW=32 elements to be processed and therefore must be a multiple of EGS=4. + + Likewise, vstart must be a multiple of EGS=4. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zvkn.yaml b/arch/ext/Zvkn.yaml new file mode 100644 index 000000000..10b545969 --- /dev/null +++ b/arch/ext/Zvkn.yaml @@ -0,0 +1,23 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zvkn +long_name: NIST Algorithm Suite +description: | + This extension is shorthand for the following set of other extensions: + + * `Zvkned` + * `Zvknhb` + * `Zvkb` + * `Zvkt` +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + implies: + - [Zvkned, "1.0.0"] + - [Zvknhb, "1.0.0"] + - [Zvkb, "1.0.0"] + - [Zvkt, "1.0.0"] diff --git a/arch/ext/Zvkned.yaml b/arch/ext/Zvkned.yaml new file mode 100644 index 000000000..e52e5bf45 --- /dev/null +++ b/arch/ext/Zvkned.yaml @@ -0,0 +1,19 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zvkned +long_name: "NIST Suite: Vector AES Block Cipher" +description: | + Instructions for accelerating encryption, decryption and key-schedule functions of the AES block + cipher as defined in Federal Information Processing Standards Publication 197. + + All of these instructions work on 128-bit element groups comprised of four 32-bit elements. + + To help avoid side-channel timing attacks, these instructions shall be implemented with data-independent timing. + +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zvknha.yaml b/arch/ext/Zvknha.yaml new file mode 100644 index 000000000..70166f48a --- /dev/null +++ b/arch/ext/Zvknha.yaml @@ -0,0 +1,14 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zvknha +long_name: "NIST Suite: Vector SHA-2 Secure Hash (SHA-256)" +description: | + Instructions for accelerating 256-bit SHA-2 as defined in FIPS PUB 180-4 Secure Hash Standard (SHS) + +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zvknhb.yaml b/arch/ext/Zvknhb.yaml new file mode 100644 index 000000000..65ff021c5 --- /dev/null +++ b/arch/ext/Zvknhb.yaml @@ -0,0 +1,15 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zvknhb +long_name: "NIST Suite: Vector SHA-2 Secure Hash (SHA-256 + SHA-512)" +description: | + Instructions for accelerating 256-bit/512-bit SHA-2 as defined in FIPS PUB 180-4 Secure Hash Standard (SHS) + +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + implies: [Zvknha, "1.0.0"] diff --git a/arch/ext/Zvks.yaml b/arch/ext/Zvks.yaml new file mode 100644 index 000000000..f30c71bbf --- /dev/null +++ b/arch/ext/Zvks.yaml @@ -0,0 +1,23 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zvks +long_name: ShangMi Algorithm Suite +description: | + This extension is shorthand for the following set of other extensions: + + * `Zvksed` + * `Zvksh` + * `Zvkb` + * `Zvkt` +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null + implies: + - [Zvksed, 
"1.0.0"] + - [Zvksh, "1.0.0"] + - [Zvkb, "1.0.0"] + - [Zvkt, "1.0.0"] diff --git a/arch/ext/Zvksed.yaml b/arch/ext/Zvksed.yaml new file mode 100644 index 000000000..420b902fb --- /dev/null +++ b/arch/ext/Zvksed.yaml @@ -0,0 +1,21 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zvksed +long_name: "ShangMi Suite: SM4 Block Cipher" +description: | + Instructions for accelerating encryption, decryption and key-schedule functions of the SM4 block cipher. + + The SM4 block cipher is specified in 32907-2016: {SM4} Block Cipher Algorithm (GB/T 32907-2016: SM4 Block Cipher Algorithm, 2016) + + There are other various sources available that describe the SM4 block cipher. While not the final version of the standard, RFC 8998 ShangMi (SM) Cipher Suites for TLS 1.3 is useful and easy to access. + + All of these instructions work on 128-bit element groups comprised of four 32-bit elements. + + To help avoid side-channel timing attacks, these instructions shall be implemented with data-independent timing. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zvksh.yaml b/arch/ext/Zvksh.yaml new file mode 100644 index 000000000..0b66a7abc --- /dev/null +++ b/arch/ext/Zvksh.yaml @@ -0,0 +1,17 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zvksh +long_name: "ShangMi Suite: SM3 Secure Hash" +description: | + Instructions for accelerating functions of the SM3 Hash Function. + + All of these instructions work on 256-bit element groups comprised of eight 32-bit elements. + + To help avoid side-channel timing attacks, these instructions shall be implemented with data-independent timing. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/ext/Zvkt.yaml b/arch/ext/Zvkt.yaml new file mode 100644 index 000000000..a54fa04a4 --- /dev/null +++ b/arch/ext/Zvkt.yaml @@ -0,0 +1,25 @@ +# yaml-language-server: $schema=../../schemas/ext_schema.json + +$schema: "ext_schema.json#" +kind: extension +name: Zvkt +long_name: Vector Data-Independent Execution Latency +description: | + The Zvkt extension requires all implemented instructions from the following list to be executed + with data-independent execution latency. + + Data-independent execution latency (DIEL) applies to all data operands of an instruction, even + those that are not a part of the body or that are inactive. + However, DIEL does not apply to other values such as vl, vtype, and the mask (when used to control + execution of a masked vector instruction). + Also, DIEL does not apply to constant values specified in the instruction encoding such as the + use of the zero register (x0), and, in the case of immediate forms of an instruction, the values + in the immediate fields (i.e., imm, and uimm). + + In some cases --- which are explicitly specified in the lists below --- operands that are used as + control rather than data are exempt from DIEL. +type: unprivileged +versions: +- version: "1.0.0" + state: ratified + ratification_date: null diff --git a/arch/inst/F/fleq.s.yaml b/arch/inst/F/fleq.s.yaml index 0f5b5796a..fd74469b9 100644 --- a/arch/inst/F/fleq.s.yaml +++ b/arch/inst/F/fleq.s.yaml @@ -6,7 +6,7 @@ name: fleq.s long_name: No synopsis available. description: | No description available. 
-definedBy: F, Zfa +definedBy: { allOf: [F, Zfa] } assembly: xd, fs1, fs2 encoding: match: 1010000----------100-----1010011 diff --git a/arch/inst/F/fli.s.yaml b/arch/inst/F/fli.s.yaml index 008c13fa8..29427a04d 100644 --- a/arch/inst/F/fli.s.yaml +++ b/arch/inst/F/fli.s.yaml @@ -6,7 +6,7 @@ name: fli.s long_name: No synopsis available. description: | No description available. -definedBy: F, Zfa +definedBy: { allOf: [F, Zfa] } assembly: fd, fs1 encoding: match: 111100000001-----000-----1010011 diff --git a/arch/inst/F/fltq.s.yaml b/arch/inst/F/fltq.s.yaml index fcda54723..9ca1db1a9 100644 --- a/arch/inst/F/fltq.s.yaml +++ b/arch/inst/F/fltq.s.yaml @@ -6,7 +6,7 @@ name: fltq.s long_name: No synopsis available. description: | No description available. -definedBy: F, Zfa +definedBy: { allOf: [F, Zfa] } assembly: xd, fs1, fs2 encoding: match: 1010000----------101-----1010011 diff --git a/arch/inst/F/fmaxm.s.yaml b/arch/inst/F/fmaxm.s.yaml index b303bdfdd..cc902442b 100644 --- a/arch/inst/F/fmaxm.s.yaml +++ b/arch/inst/F/fmaxm.s.yaml @@ -6,7 +6,7 @@ name: fmaxm.s long_name: No synopsis available. description: | No description available. -definedBy: F, Zfa +definedBy: { allOf: [F, Zfa] } assembly: xd, xs1, xs2 encoding: match: 0010100----------011-----1010011 diff --git a/arch/inst/F/fminm.s.yaml b/arch/inst/F/fminm.s.yaml index 83b6cf7bf..61f326a90 100644 --- a/arch/inst/F/fminm.s.yaml +++ b/arch/inst/F/fminm.s.yaml @@ -6,7 +6,7 @@ name: fminm.s long_name: No synopsis available. description: | No description available. -definedBy: F, Zfa +definedBy: { allOf: [F, Zfa] } assembly: fd, fs1, fs2 encoding: match: 0010100----------010-----1010011 diff --git a/arch/inst/F/fround.s.yaml b/arch/inst/F/fround.s.yaml index cd57704a9..840c9869c 100644 --- a/arch/inst/F/fround.s.yaml +++ b/arch/inst/F/fround.s.yaml @@ -6,7 +6,7 @@ name: fround.s long_name: No synopsis available. description: | No description available. -definedBy: F, Zfa +definedBy: { allOf: [F, Zfa] } assembly: fd, xs1, rm encoding: match: 010000000100-------------1010011 diff --git a/arch/inst/F/froundnx.s.yaml b/arch/inst/F/froundnx.s.yaml index 828dfd8b6..e1990a436 100644 --- a/arch/inst/F/froundnx.s.yaml +++ b/arch/inst/F/froundnx.s.yaml @@ -6,7 +6,7 @@ name: froundnx.s long_name: No synopsis available. description: | No description available. 
-definedBy: F, Zfa +definedBy: { allOf: [F, Zfa] } assembly: fd, rs1, rm encoding: match: 010000000101-------------1010011 diff --git a/arch/inst/V/vadd.vi.yaml b/arch/inst/V/vadd.vi.yaml index f0246118a..fa6621767 100644 --- a/arch/inst/V/vadd.vi.yaml +++ b/arch/inst/V/vadd.vi.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vadd.vv.yaml b/arch/inst/V/vadd.vv.yaml index 94fc3d0f3..3db8f3f4a 100644 --- a/arch/inst/V/vadd.vv.yaml +++ b/arch/inst/V/vadd.vv.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vadd.vx.yaml b/arch/inst/V/vadd.vx.yaml index 4067105f1..cad144d40 100644 --- a/arch/inst/V/vadd.vx.yaml +++ b/arch/inst/V/vadd.vx.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vrsub.vi.yaml b/arch/inst/V/vrsub.vi.yaml index c2e1a4fe1..e84285d0d 100644 --- a/arch/inst/V/vrsub.vi.yaml +++ b/arch/inst/V/vrsub.vi.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vrsub.vx.yaml b/arch/inst/V/vrsub.vx.yaml index 28dc3fd33..ac509828a 100644 --- a/arch/inst/V/vrsub.vx.yaml +++ b/arch/inst/V/vrsub.vx.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vsub.vv.yaml b/arch/inst/V/vsub.vv.yaml index a257e86d5..d007ca914 100644 --- a/arch/inst/V/vsub.vv.yaml +++ b/arch/inst/V/vsub.vv.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vsub.vx.yaml b/arch/inst/V/vsub.vx.yaml index 193e7408d..07500623d 100644 --- a/arch/inst/V/vsub.vx.yaml +++ b/arch/inst/V/vsub.vx.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vwadd.vv.yaml b/arch/inst/V/vwadd.vv.yaml index 3b237223e..fcd2581b8 100644 --- a/arch/inst/V/vwadd.vv.yaml +++ b/arch/inst/V/vwadd.vv.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vwadd.vx.yaml b/arch/inst/V/vwadd.vx.yaml index ed7378983..e991fbfcb 100644 --- a/arch/inst/V/vwadd.vx.yaml +++ b/arch/inst/V/vwadd.vx.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vwadd.wv.yaml b/arch/inst/V/vwadd.wv.yaml index 035b42755..2f7f2a7c6 100644 --- a/arch/inst/V/vwadd.wv.yaml +++ b/arch/inst/V/vwadd.wv.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vwadd.wx.yaml b/arch/inst/V/vwadd.wx.yaml index 07a880ebe..e100316c6 100644 --- a/arch/inst/V/vwadd.wx.yaml +++ b/arch/inst/V/vwadd.wx.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vwaddu.vv.yaml b/arch/inst/V/vwaddu.vv.yaml index e2c393361..5b11b272b 100644 --- a/arch/inst/V/vwaddu.vv.yaml +++ 
b/arch/inst/V/vwaddu.vv.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vwaddu.vx.yaml b/arch/inst/V/vwaddu.vx.yaml index cf58098a2..69565d209 100644 --- a/arch/inst/V/vwaddu.vx.yaml +++ b/arch/inst/V/vwaddu.vx.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vwaddu.wv.yaml b/arch/inst/V/vwaddu.wv.yaml index 198b61f01..48f1882b3 100644 --- a/arch/inst/V/vwaddu.wv.yaml +++ b/arch/inst/V/vwaddu.wv.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vwaddu.wx.yaml b/arch/inst/V/vwaddu.wx.yaml index 8eb165cdd..4dc6ae773 100644 --- a/arch/inst/V/vwaddu.wx.yaml +++ b/arch/inst/V/vwaddu.wx.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vwsub.vv.yaml b/arch/inst/V/vwsub.vv.yaml index def690ade..0544e5dda 100644 --- a/arch/inst/V/vwsub.vv.yaml +++ b/arch/inst/V/vwsub.vv.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vwsub.vx.yaml b/arch/inst/V/vwsub.vx.yaml index 037a57975..b02a815eb 100644 --- a/arch/inst/V/vwsub.vx.yaml +++ b/arch/inst/V/vwsub.vx.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vwsub.wv.yaml b/arch/inst/V/vwsub.wv.yaml index d295bd785..9ea214c0e 100644 --- a/arch/inst/V/vwsub.wv.yaml +++ b/arch/inst/V/vwsub.wv.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vwsub.wx.yaml b/arch/inst/V/vwsub.wx.yaml index 76b836b67..113118a71 100644 --- a/arch/inst/V/vwsub.wx.yaml +++ b/arch/inst/V/vwsub.wx.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vwsubu.vv.yaml b/arch/inst/V/vwsubu.vv.yaml index c2b5c8eb0..d7e82e561 100644 --- a/arch/inst/V/vwsubu.vv.yaml +++ b/arch/inst/V/vwsubu.vv.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vwsubu.vx.yaml b/arch/inst/V/vwsubu.vx.yaml index d70695bb2..1823dc06b 100644 --- a/arch/inst/V/vwsubu.vx.yaml +++ b/arch/inst/V/vwsubu.vx.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vwsubu.wv.yaml b/arch/inst/V/vwsubu.wv.yaml index 5da6b8f10..9c4fe76a0 100644 --- a/arch/inst/V/vwsubu.wv.yaml +++ b/arch/inst/V/vwsubu.wv.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/V/vwsubu.wx.yaml b/arch/inst/V/vwsubu.wx.yaml index 3aab9d1b1..7d1ba7850 100644 --- a/arch/inst/V/vwsubu.wx.yaml +++ b/arch/inst/V/vwsubu.wx.yaml @@ -24,7 +24,7 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zbp/gorci.yaml 
b/arch/inst/Zbp/gorci.yaml deleted file mode 100644 index 4b61d319b..000000000 --- a/arch/inst/Zbp/gorci.yaml +++ /dev/null @@ -1,29 +0,0 @@ -# yaml-language-server: $schema=../../../schemas/inst_schema.json - -$schema: inst_schema.json# -kind: instruction -name: gorci -long_name: No synopsis available. -description: | - No description available. -definedBy: - anyOf: [B, Zbp] -assembly: xd, xs1, shamt -encoding: - match: 001010-----------101-----0010011 - variables: - - name: shamt - location: 25-20 - - name: rs1 - location: 19-15 - - name: rd - location: 11-7 -access: - s: always - u: always - vs: always - vu: always -data_independent_timing: false -base: 64 -operation(): | - diff --git a/arch/inst/Zbp/grevi.yaml b/arch/inst/Zbp/grevi.yaml deleted file mode 100644 index 89bc7860a..000000000 --- a/arch/inst/Zbp/grevi.yaml +++ /dev/null @@ -1,29 +0,0 @@ -# yaml-language-server: $schema=../../../schemas/inst_schema.json - -$schema: inst_schema.json# -kind: instruction -name: grevi -long_name: No synopsis available. -description: | - No description available. -definedBy: - anyOf: [B, Zbp] -assembly: xd, xs1, shamt -encoding: - match: 011010-----------101-----0010011 - variables: - - name: shamt - location: 25-20 - - name: rs1 - location: 19-15 - - name: rd - location: 11-7 -access: - s: always - u: always - vs: always - vu: always -data_independent_timing: false -base: 64 -operation(): | - diff --git a/arch/inst/Zbp/shfli.yaml b/arch/inst/Zbp/shfli.yaml deleted file mode 100644 index 3b30aae23..000000000 --- a/arch/inst/Zbp/shfli.yaml +++ /dev/null @@ -1,29 +0,0 @@ -# yaml-language-server: $schema=../../../schemas/inst_schema.json - -$schema: inst_schema.json# -kind: instruction -name: shfli -long_name: No synopsis available. -description: | - No description available. -definedBy: - anyOf: [B, Zbp] -assembly: xd, xs1, shamt -encoding: - match: 0000100----------001-----0010011 - variables: - - name: shamt - location: 24-20 - - name: rs1 - location: 19-15 - - name: rd - location: 11-7 -access: - s: always - u: always - vs: always - vu: always -data_independent_timing: false -base: 64 -operation(): | - diff --git a/arch/inst/Zbp/unshfli.yaml b/arch/inst/Zbp/unshfli.yaml deleted file mode 100644 index 8038269f7..000000000 --- a/arch/inst/Zbp/unshfli.yaml +++ /dev/null @@ -1,29 +0,0 @@ -# yaml-language-server: $schema=../../../schemas/inst_schema.json - -$schema: inst_schema.json# -kind: instruction -name: unshfli -long_name: No synopsis available. -description: | - No description available. -definedBy: - anyOf: [B, Zbp] -assembly: xd, xs1, shamt -encoding: - match: 0000100----------101-----0010011 - variables: - - name: shamt - location: 24-20 - - name: rs1 - location: 19-15 - - name: rd - location: 11-7 -access: - s: always - u: always - vs: always - vu: always -data_independent_timing: false -base: 64 -operation(): | - diff --git a/arch/inst/Zbp/xperm16.yaml b/arch/inst/Zbp/xperm16.yaml deleted file mode 100644 index afd0ea3fe..000000000 --- a/arch/inst/Zbp/xperm16.yaml +++ /dev/null @@ -1,28 +0,0 @@ -# yaml-language-server: $schema=../../../schemas/inst_schema.json - -$schema: inst_schema.json# -kind: instruction -name: xperm16 -long_name: No synopsis available. -description: | - No description available. 
-definedBy: - anyOf: [B, Zbp] -assembly: xd, xs1, xs2 -encoding: - match: 0010100----------110-----0110011 - variables: - - name: rs2 - location: 24-20 - - name: rs1 - location: 19-15 - - name: rd - location: 11-7 -access: - s: always - u: always - vs: always - vu: always -data_independent_timing: false -operation(): | - diff --git a/arch/inst/Zbp/xperm32.yaml b/arch/inst/Zbp/xperm32.yaml deleted file mode 100644 index 8045330db..000000000 --- a/arch/inst/Zbp/xperm32.yaml +++ /dev/null @@ -1,29 +0,0 @@ -# yaml-language-server: $schema=../../../schemas/inst_schema.json - -$schema: inst_schema.json# -kind: instruction -name: xperm32 -long_name: No synopsis available. -description: | - No description available. -definedBy: - anyOf: [B, Zbp] -assembly: xd, xs1, xs2 -encoding: - match: 0010100----------000-----0110011 - variables: - - name: rs2 - location: 24-20 - - name: rs1 - location: 19-15 - - name: rd - location: 11-7 -access: - s: always - u: always - vs: always - vu: always -data_independent_timing: false -base: 64 -operation(): | - diff --git a/arch/inst/Zvbb/vandn.vv.yaml b/arch/inst/Zvbb/vandn.vv.yaml index 27667b63e..ed9f4469d 100644 --- a/arch/inst/Zvbb/vandn.vv.yaml +++ b/arch/inst/Zvbb/vandn.vv.yaml @@ -25,6 +25,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbb/vandn.vx.yaml b/arch/inst/Zvbb/vandn.vx.yaml index 252b3444c..86f36b9cb 100644 --- a/arch/inst/Zvbb/vandn.vx.yaml +++ b/arch/inst/Zvbb/vandn.vx.yaml @@ -25,6 +25,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbb/vbrev.v.yaml b/arch/inst/Zvbb/vbrev.v.yaml index 175e50f27..c08d7299f 100644 --- a/arch/inst/Zvbb/vbrev.v.yaml +++ b/arch/inst/Zvbb/vbrev.v.yaml @@ -23,6 +23,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbb/vbrev8.v.yaml b/arch/inst/Zvbb/vbrev8.v.yaml index 14df079e2..b645205cc 100644 --- a/arch/inst/Zvbb/vbrev8.v.yaml +++ b/arch/inst/Zvbb/vbrev8.v.yaml @@ -23,6 +23,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbb/vclz.v.yaml b/arch/inst/Zvbb/vclz.v.yaml index 52776b73d..dbd756f61 100644 --- a/arch/inst/Zvbb/vclz.v.yaml +++ b/arch/inst/Zvbb/vclz.v.yaml @@ -23,6 +23,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbb/vcpop.v.yaml b/arch/inst/Zvbb/vcpop.v.yaml index 0145035e4..49aac39e3 100644 --- a/arch/inst/Zvbb/vcpop.v.yaml +++ b/arch/inst/Zvbb/vcpop.v.yaml @@ -23,6 +23,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbb/vctz.v.yaml b/arch/inst/Zvbb/vctz.v.yaml index 0b03fbad5..da9ad2996 100644 --- a/arch/inst/Zvbb/vctz.v.yaml +++ b/arch/inst/Zvbb/vctz.v.yaml @@ -23,6 +23,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbb/vrev8.v.yaml b/arch/inst/Zvbb/vrev8.v.yaml index 79477546e..3e7be7466 100644 --- a/arch/inst/Zvbb/vrev8.v.yaml +++ b/arch/inst/Zvbb/vrev8.v.yaml @@ -23,6 +23,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git 
a/arch/inst/Zvbb/vrol.vv.yaml b/arch/inst/Zvbb/vrol.vv.yaml index 1c54fa104..6971629fc 100644 --- a/arch/inst/Zvbb/vrol.vv.yaml +++ b/arch/inst/Zvbb/vrol.vv.yaml @@ -25,6 +25,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbb/vrol.vx.yaml b/arch/inst/Zvbb/vrol.vx.yaml index 395871d9e..775ec3b63 100644 --- a/arch/inst/Zvbb/vrol.vx.yaml +++ b/arch/inst/Zvbb/vrol.vx.yaml @@ -25,6 +25,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbb/vror.vi.yaml b/arch/inst/Zvbb/vror.vi.yaml index f2dc45e56..698df6a50 100644 --- a/arch/inst/Zvbb/vror.vi.yaml +++ b/arch/inst/Zvbb/vror.vi.yaml @@ -25,6 +25,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbb/vror.vv.yaml b/arch/inst/Zvbb/vror.vv.yaml index 5289f2f82..51fef4e4b 100644 --- a/arch/inst/Zvbb/vror.vv.yaml +++ b/arch/inst/Zvbb/vror.vv.yaml @@ -25,6 +25,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbb/vror.vx.yaml b/arch/inst/Zvbb/vror.vx.yaml index 40a3469c7..57404b802 100644 --- a/arch/inst/Zvbb/vror.vx.yaml +++ b/arch/inst/Zvbb/vror.vx.yaml @@ -25,6 +25,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbb/vwsll.vi.yaml b/arch/inst/Zvbb/vwsll.vi.yaml index 86264addb..10051c43d 100644 --- a/arch/inst/Zvbb/vwsll.vi.yaml +++ b/arch/inst/Zvbb/vwsll.vi.yaml @@ -25,6 +25,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbb/vwsll.vv.yaml b/arch/inst/Zvbb/vwsll.vv.yaml index 4e6c43fd1..a15a1d27f 100644 --- a/arch/inst/Zvbb/vwsll.vv.yaml +++ b/arch/inst/Zvbb/vwsll.vv.yaml @@ -25,6 +25,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbb/vwsll.vx.yaml b/arch/inst/Zvbb/vwsll.vx.yaml index 56c02f818..6bdacea97 100644 --- a/arch/inst/Zvbb/vwsll.vx.yaml +++ b/arch/inst/Zvbb/vwsll.vx.yaml @@ -25,6 +25,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbc/vclmul.vv.yaml b/arch/inst/Zvbc/vclmul.vv.yaml index bb69c75ef..8652d0095 100644 --- a/arch/inst/Zvbc/vclmul.vv.yaml +++ b/arch/inst/Zvbc/vclmul.vv.yaml @@ -25,6 +25,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbc/vclmul.vx.yaml b/arch/inst/Zvbc/vclmul.vx.yaml index 550a28392..6e9759ed2 100644 --- a/arch/inst/Zvbc/vclmul.vx.yaml +++ b/arch/inst/Zvbc/vclmul.vx.yaml @@ -25,6 +25,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbc/vclmulh.vv.yaml b/arch/inst/Zvbc/vclmulh.vv.yaml index bce6dbdcf..fdc8a9390 100644 --- a/arch/inst/Zvbc/vclmulh.vv.yaml +++ b/arch/inst/Zvbc/vclmulh.vv.yaml @@ -25,6 +25,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvbc/vclmulh.vx.yaml b/arch/inst/Zvbc/vclmulh.vx.yaml index 5bd7e1ae3..458a15108 100644 --- 
a/arch/inst/Zvbc/vclmulh.vx.yaml +++ b/arch/inst/Zvbc/vclmulh.vx.yaml @@ -25,6 +25,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvkg/vghsh.vv.yaml b/arch/inst/Zvkg/vghsh.vv.yaml index ffaa78827..d95763ba5 100644 --- a/arch/inst/Zvkg/vghsh.vv.yaml +++ b/arch/inst/Zvkg/vghsh.vv.yaml @@ -22,6 +22,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvkg/vgmul.vv.yaml b/arch/inst/Zvkg/vgmul.vv.yaml index a1d2dc1ed..bcb3e10e6 100644 --- a/arch/inst/Zvkg/vgmul.vv.yaml +++ b/arch/inst/Zvkg/vgmul.vv.yaml @@ -20,6 +20,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvkn/vaesdf.vs.yaml b/arch/inst/Zvkn/vaesdf.vs.yaml index 0996304f1..e569d2ab6 100644 --- a/arch/inst/Zvkn/vaesdf.vs.yaml +++ b/arch/inst/Zvkn/vaesdf.vs.yaml @@ -21,6 +21,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvkn/vaesdf.vv.yaml b/arch/inst/Zvkn/vaesdf.vv.yaml index aa4a6c059..e7bcadeea 100644 --- a/arch/inst/Zvkn/vaesdf.vv.yaml +++ b/arch/inst/Zvkn/vaesdf.vv.yaml @@ -21,6 +21,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvkn/vaesdm.vs.yaml b/arch/inst/Zvkn/vaesdm.vs.yaml index 68f3a3004..988597da8 100644 --- a/arch/inst/Zvkn/vaesdm.vs.yaml +++ b/arch/inst/Zvkn/vaesdm.vs.yaml @@ -21,6 +21,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvkn/vaesdm.vv.yaml b/arch/inst/Zvkn/vaesdm.vv.yaml index c3c043f20..d03a10af2 100644 --- a/arch/inst/Zvkn/vaesdm.vv.yaml +++ b/arch/inst/Zvkn/vaesdm.vv.yaml @@ -21,6 +21,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvkn/vaesef.vs.yaml b/arch/inst/Zvkn/vaesef.vs.yaml index c335467b3..408bab84e 100644 --- a/arch/inst/Zvkn/vaesef.vs.yaml +++ b/arch/inst/Zvkn/vaesef.vs.yaml @@ -21,6 +21,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvkn/vaesef.vv.yaml b/arch/inst/Zvkn/vaesef.vv.yaml index 07d123a8f..679174e37 100644 --- a/arch/inst/Zvkn/vaesef.vv.yaml +++ b/arch/inst/Zvkn/vaesef.vv.yaml @@ -21,6 +21,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvkn/vaesem.vs.yaml b/arch/inst/Zvkn/vaesem.vs.yaml index 574400752..accce3c93 100644 --- a/arch/inst/Zvkn/vaesem.vs.yaml +++ b/arch/inst/Zvkn/vaesem.vs.yaml @@ -21,6 +21,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvkn/vaesem.vv.yaml b/arch/inst/Zvkn/vaesem.vv.yaml index cb29270c0..21412e016 100644 --- a/arch/inst/Zvkn/vaesem.vv.yaml +++ b/arch/inst/Zvkn/vaesem.vv.yaml @@ -21,6 +21,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvkn/vaeskf1.vi.yaml b/arch/inst/Zvkn/vaeskf1.vi.yaml index acfc9c1f4..1e817ae46 100644 --- a/arch/inst/Zvkn/vaeskf1.vi.yaml +++ b/arch/inst/Zvkn/vaeskf1.vi.yaml 
@@ -23,6 +23,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvkn/vaeskf2.vi.yaml b/arch/inst/Zvkn/vaeskf2.vi.yaml index bb5e68084..25fafcc02 100644 --- a/arch/inst/Zvkn/vaeskf2.vi.yaml +++ b/arch/inst/Zvkn/vaeskf2.vi.yaml @@ -23,6 +23,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvkn/vaesz.vs.yaml b/arch/inst/Zvkn/vaesz.vs.yaml index 8d471a615..c7876c99c 100644 --- a/arch/inst/Zvkn/vaesz.vs.yaml +++ b/arch/inst/Zvkn/vaesz.vs.yaml @@ -3,7 +3,7 @@ $schema: "inst_schema.json#" kind: instruction name: vaesz.vs -long_name: No synopsis available. +long_name: Vector AES round zero description: | No description available. definedBy: @@ -21,6 +21,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvkn/vsha2ch.vv.yaml b/arch/inst/Zvkn/vsha2ch.vv.yaml index c7c307289..99209281e 100644 --- a/arch/inst/Zvkn/vsha2ch.vv.yaml +++ b/arch/inst/Zvkn/vsha2ch.vv.yaml @@ -23,6 +23,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvkn/vsha2cl.vv.yaml b/arch/inst/Zvkn/vsha2cl.vv.yaml index af0a15046..c7bee332a 100644 --- a/arch/inst/Zvkn/vsha2cl.vv.yaml +++ b/arch/inst/Zvkn/vsha2cl.vv.yaml @@ -23,6 +23,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvkn/vsha2ms.vv.yaml b/arch/inst/Zvkn/vsha2ms.vv.yaml index 3ed25d80f..87c423c10 100644 --- a/arch/inst/Zvkn/vsha2ms.vv.yaml +++ b/arch/inst/Zvkn/vsha2ms.vv.yaml @@ -23,6 +23,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvks/vsm3c.vi.yaml b/arch/inst/Zvks/vsm3c.vi.yaml index ce7ee47ad..b18a8df08 100644 --- a/arch/inst/Zvks/vsm3c.vi.yaml +++ b/arch/inst/Zvks/vsm3c.vi.yaml @@ -23,6 +23,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvks/vsm3me.vv.yaml b/arch/inst/Zvks/vsm3me.vv.yaml index 083bc526b..40bd15370 100644 --- a/arch/inst/Zvks/vsm3me.vv.yaml +++ b/arch/inst/Zvks/vsm3me.vv.yaml @@ -23,6 +23,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvks/vsm4k.vi.yaml b/arch/inst/Zvks/vsm4k.vi.yaml index fb641d68f..0c63c792d 100644 --- a/arch/inst/Zvks/vsm4k.vi.yaml +++ b/arch/inst/Zvks/vsm4k.vi.yaml @@ -23,6 +23,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvks/vsm4r.vs.yaml b/arch/inst/Zvks/vsm4r.vs.yaml index 9fa81255a..953c4fb6f 100644 --- a/arch/inst/Zvks/vsm4r.vs.yaml +++ b/arch/inst/Zvks/vsm4r.vs.yaml @@ -21,6 +21,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/inst/Zvks/vsm4r.vv.yaml b/arch/inst/Zvks/vsm4r.vv.yaml index 40f6c8b67..30624d912 100644 --- a/arch/inst/Zvks/vsm4r.vv.yaml +++ b/arch/inst/Zvks/vsm4r.vv.yaml @@ -21,6 +21,6 @@ access: u: always vs: always vu: always -data_independent_timing: false +data_independent_timing: true operation(): | diff --git a/arch/manual/isa/isa.yaml 
b/arch/manual/isa.yaml similarity index 74% rename from arch/manual/isa/isa.yaml rename to arch/manual/isa.yaml index ed762ec09..56da9d21a 100644 --- a/arch/manual/isa/isa.yaml +++ b/arch/manual/isa.yaml @@ -1,3 +1,7 @@ +# yaml-language-server: $schema=../../schemas/manual_schema.json + +$schema: manual_schema.json# +kind: manual name: isa marketing_name: RISC-V ISA Manual url: https://github.com/riscv/riscv-isa-manual diff --git a/arch/manual/isa/20240411/contents.yaml b/arch/manual_version/isa/20240411/isa_20240411.yaml similarity index 59% rename from arch/manual/isa/20240411/contents.yaml rename to arch/manual_version/isa/20240411/isa_20240411.yaml index 91901d70a..69eb0850a 100644 --- a/arch/manual/isa/20240411/contents.yaml +++ b/arch/manual_version/isa/20240411/isa_20240411.yaml @@ -1,12 +1,14 @@ # yaml-language-server: $schema=../../../../schemas/manual_version_schema.json --- -manual: isa +$schema: manual_version_schema.json# +kind: manual version +manual: { $ref: manual/isa.yaml# } version: "4.0.0-pre" -name: "20240411" +name: "isa_20240411" marketing_version: "20240411" state: "development" -url: https://github.com/riscv/riscv-isa-manual/releases/tag/20240411 +url: https://github.com/riscv/releases/tag/20240411 uses_isa_manual: true isa_manual_tree: "tags/20240411" volumes: @@ -19,51 +21,51 @@ volumes: name: SiFive, Inc. url: https://www.sifive.com/ chapters: - - riscv-isa-manual/src/colophon.adoc - - riscv-isa-manual/src/intro.adoc - - riscv-isa-manual/src/rv32.adoc - - riscv-isa-manual/src/rv32e.adoc - - riscv-isa-manual/src/rv64.adoc - - riscv-isa-manual/src/rv128.adoc - - riscv-isa-manual/src/zifencei.adoc - - riscv-isa-manual/src/zicsr.adoc - - riscv-isa-manual/src/counters.adoc - - riscv-isa-manual/src/zihintntl.adoc - - riscv-isa-manual/src/zihintpause.adoc - - riscv-isa-manual/src/zimop.adoc - - riscv-isa-manual/src/zicond.adoc - - riscv-isa-manual/src/m-st-ext.adoc - - riscv-isa-manual/src/a-st-ext.adoc - - riscv-isa-manual/src/zawrs.adoc - - riscv-isa-manual/src/zacas.adoc - - riscv-isa-manual/src/rvwmo.adoc - - riscv-isa-manual/src/ztso-st-ext.adoc - - riscv-isa-manual/src/cmo.adoc - - riscv-isa-manual/src/f-st-ext.adoc - - riscv-isa-manual/src/d-st-ext.adoc - - riscv-isa-manual/src/q-st-ext.adoc - - riscv-isa-manual/src/zfh.adoc - - riscv-isa-manual/src/zfa.adoc - - riscv-isa-manual/src/zfinx.adoc - - riscv-isa-manual/src/c-st-ext.adoc - - riscv-isa-manual/src/zc.adoc - - riscv-isa-manual/src/b-st-ext.adoc - - riscv-isa-manual/src/j-st-ext.adoc - - riscv-isa-manual/src/p-st-ext.adoc - - riscv-isa-manual/src/v-st-ext.adoc - - riscv-isa-manual/src/scalar-crypto.adoc - - riscv-isa-manual/src/vector-crypto.adoc - - riscv-isa-manual/src/rv-32-64g.adoc - - riscv-isa-manual/src/extending.adoc - - riscv-isa-manual/src/naming.adoc - - riscv-isa-manual/src/history.adoc - - riscv-isa-manual/src/mm-eplan.adoc - - riscv-isa-manual/src/mm-formal.adoc + - src/colophon.adoc + - src/intro.adoc + - src/rv32.adoc + - src/rv32e.adoc + - src/rv64.adoc + - src/rv128.adoc + - src/zifencei.adoc + - src/zicsr.adoc + - src/counters.adoc + - src/zihintntl.adoc + - src/zihintpause.adoc + - src/zimop.adoc + - src/zicond.adoc + - src/m-st-ext.adoc + - src/a-st-ext.adoc + - src/zawrs.adoc + - src/zacas.adoc + - src/rvwmo.adoc + - src/ztso-st-ext.adoc + - src/cmo.adoc + - src/f-st-ext.adoc + - src/d-st-ext.adoc + - src/q-st-ext.adoc + - src/zfh.adoc + - src/zfa.adoc + - src/zfinx.adoc + - src/c-st-ext.adoc + - src/zc.adoc + - src/b-st-ext.adoc + - src/j-st-ext.adoc + - src/p-st-ext.adoc + - 
src/v-st-ext.adoc + - src/scalar-crypto.adoc + - src/vector-crypto.adoc + - src/rv-32-64g.adoc + - src/extending.adoc + - src/naming.adoc + - src/history.adoc + - src/mm-eplan.adoc + - src/mm-formal.adoc #Appendices for Vector - - riscv-isa-manual/src/vector-examples.adoc - - riscv-isa-manual/src/calling-convention.adoc + - src/vector-examples.adoc + - src/calling-convention.adoc #/End of Vector appendices - - riscv-isa-manual/src/index.adoc + - src/index.adoc extensions: - [I, "2.1.0"] - [U, "1.12.0"] @@ -133,9 +135,6 @@ volumes: - [Zvfh, "1.0.0"] - [Zvknha, "1.0.0"] - [Zvknhb, "1.0.0"] - - [Zbkb, "1.0.0"] - - [Zbkc, "1.0.0"] - - [Zbkx, "1.0.0"] - [Zknd, "1.0.0"] - [Zkne, "1.0.0"] - [Zknh, "1.0.0"] @@ -178,23 +177,23 @@ volumes: name: SiFive, Inc. url: https://www.sifive.com/ chapters: - - riscv-isa-manual/src/priv-preface.adoc - - riscv-isa-manual/src/priv-intro.adoc - - riscv-isa-manual/src/priv-csrs.adoc - - riscv-isa-manual/src/machine.adoc - - riscv-isa-manual/src/smstateen.adoc - - riscv-isa-manual/src/indirect-csr.adoc - - riscv-isa-manual/src/smepmp.adoc - - riscv-isa-manual/src/smcntrpmf.adoc - - riscv-isa-manual/src/rnmi.adoc - - riscv-isa-manual/src/smcdeleg.adoc - - riscv-isa-manual/src/supervisor.adoc - - riscv-isa-manual/src/sstc.adoc - - riscv-isa-manual/src/sscofpmf.adoc - - riscv-isa-manual/src/hypervisor.adoc - - riscv-isa-manual/src/priv-insns.adoc - - riscv-isa-manual/src/priv-history.adoc - - riscv-isa-manual/src/bibliography.adoc + - src/priv-preface.adoc + - src/priv-intro.adoc + - src/priv-csrs.adoc + - src/machine.adoc + - src/smstateen.adoc + - src/indirect-csr.adoc + - src/smepmp.adoc + - src/smcntrpmf.adoc + - src/rnmi.adoc + - src/smcdeleg.adoc + - src/supervisor.adoc + - src/sstc.adoc + - src/sscofpmf.adoc + - src/hypervisor.adoc + - src/priv-insns.adoc + - src/priv-history.adoc + - src/bibliography.adoc extensions: - [Smstateen, "1.0.0"] - [Smcsrind, "1.0.0"] @@ -216,6 +215,5 @@ volumes: - [Svinval, "1.0.0"] - [Svadu, "1.0.0"] - [Svvptc, "1.0.0"] - - [Sstc, "1.0.0"] - [Sscofpmf, "1.0.0"] - [H, "1.0.0"] \ No newline at end of file diff --git a/arch/profile/MP-S-64.yaml b/arch/profile/MP-S-64.yaml new file mode 100644 index 000000000..66d150b50 --- /dev/null +++ b/arch/profile/MP-S-64.yaml @@ -0,0 +1,68 @@ +# yaml-language-server: $schema=../../schemas/profile_schema.json + +$schema: profile_schema.json# +kind: profile +name: MP-S-64 +marketing_name: MockProfile 64-bit S-mode +description: This is the Mock Profile Supervisor Mode description. +mode: S +base: 64 +release: { $ref: profile_release/MockProfileRelease.yaml# } +contributors: +- name: Micky Mouse + email: micky@disney.com + company: Disney +extensions: + $inherits: "profile/MP-U-64.yaml#/extensions" + A: + presence: mandatory + note: This should be listed as mandatory in MP-S-64 and optional in MP-U-64. + S: + presence: + optional: localized + version: "= 1.12" + Zifencei: + presence: + optional: development + version: "= 2.0" + note: + Zihpm: + presence: + optional: expansion + version: "= 2.0" + note: Made this a expansion option + Sv48: + presence: + optional: transitory + version: "= 1.11" + note: Made this a transitory option +extra_notes: +- presence: mandatory + text: | + Here's the first extra note for the mandatory extensions section. + This note is multiple lines. +- presence: optional + text: | + Here's the first extra note for the optional extensions section. + In this case, we don't differentiate between optional types. + This note is multiple lines. 
+- presence: + optional: localized + text: Here's the first extra note for the localized optional extensions section. +- presence: + optional: localized + text: Here's the second extra note for the localized optional extensions section. +- presence: + optional: development + text: Here's the first extra note for the development optional extensions section. +- presence: + optional: expansion + text: Here's the first extra note for the expansion optional extensions section. +- presence: + optional: transitory + text: Here's the first extra note for the transitory optional extensions section. +recommendations: +- text: | + Implementations are strongly recommended to raise illegal-instruction + exceptions on attempts to execute unimplemented opcodes. +- text: Micky should give Pluto an extra treat \ No newline at end of file diff --git a/arch/profile/MP-U-64.yaml b/arch/profile/MP-U-64.yaml new file mode 100644 index 000000000..3273c323b --- /dev/null +++ b/arch/profile/MP-U-64.yaml @@ -0,0 +1,17 @@ +$schema: profile_schema.json# +kind: profile +name: MP-U-64 +marketing_name: MockProfile 64-bit Unpriv +mode: Unpriv +base: 64 +release: { $ref: profile_release/MockProfileRelease.yaml# } +extensions: + A: + presence: optional + version: "= 2.1" + I: + presence: mandatory + version: "~> 2.1" + Svade: + presence: mandatory + note: Adding this to get coverage when extension "conflicts" with another (Svadu in this case). diff --git a/arch/profile/RVA20S64.yaml b/arch/profile/RVA20S64.yaml new file mode 100644 index 000000000..1d205d4d8 --- /dev/null +++ b/arch/profile/RVA20S64.yaml @@ -0,0 +1,63 @@ +$schema: profile_schema.json# +kind: profile +name: RVA20S64 +marketing_name: RVA20S64 +mode: S +base: 64 +release: { $ref: profile_release/RVA20.yaml# } +introduction: | + The RVA20S64 profile specifies the ISA features available to a + supervisor-mode execution environment in 64-bit applications + processors. RVA20S64 is based on privileged architecture version 1.11. +extensions: + $inherits: "profile/RVI20U64.yaml#/extensions" + S: + presence: mandatory + version: "= 1.11" + Zifencei: + presence: mandatory + version: "= 2.0" + note: | + Zifencei is mandated as it is the only standard way to support + instruction-cache coherence in RVA20 application processors. A new + instruction-cache coherence mechanism is under development which might + be added as an option in the future. + Svbare: + presence: mandatory + version: "= 1.0" + note: | + Svbare is a new extension name introduced with RVA20. + Sv39: + presence: mandatory + version: "= 1.11" + Svade: + presence: mandatory + version: "~> 1.0" + note: | + Svade is a new extension name introduced with RVA20. + + It is subsequently defined in more detail with the ratification of + `Svadu`. + Ssccptr: + presence: mandatory + version: "= 1.0" + note: | + Ssccptr is a new extension name introduced with RVA20. + Sstvecd: + presence: mandatory + version: "= 1.0" + note: | + Sstvecd is a new extension name introduced with RVA20. + Sstvala: + presence: mandatory + version: "= 1.0" + note: | + Sstvala is a new extension name introduced with RVA20. + Sv48: + presence: optional + version: "= 1.11" + Ssu64xl: + presence: optional + version: "= 1.0" + note: | + Ssu64xl is a new extension name introduced with RVA20.
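+# Editorial sketch (illustrative only, not part of the ratified RVA20S64 data): per the
+# MockProfile files above, the `$inherits` entry overlays this map onto
+# "profile/RVI20U64.yaml#/extensions", so a child profile can tighten an inherited
+# requirement. A hypothetical override would look like:
+#   Zicntr:
+#     presence: mandatory   # RVI20U64 lists Zicntr as optional; such an entry would mandate it here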
\ No newline at end of file diff --git a/arch/profile/RVA20U64.yaml b/arch/profile/RVA20U64.yaml new file mode 100644 index 000000000..32138643c --- /dev/null +++ b/arch/profile/RVA20U64.yaml @@ -0,0 +1,98 @@ +$schema: profile_schema.json# +kind: profile +name: RVA20U64 +marketing_name: RVA20U64 +mode: Unpriv +base: 64 +release: { $ref: profile_release/RVA20.yaml# } +introduction: | + The RVA20U64 profile specifies the ISA features available to user-mode + execution environments in 64-bit applications processors. This is the + most important profile within application processors in + terms of the amount of software that targets this profile. +extensions: + $inherits: "profile/RVI20U64.yaml#/extensions" + $remove: Zifencei # Not allowed as an option for Unpriv ISA (only available in Priv ISA). + A: + presence: mandatory + C: + presence: mandatory + D: + presence: mandatory + F: + presence: mandatory + M: + presence: mandatory + U: + presence: mandatory + version: "~> 2.0" + param_constraints: + U_MODE_ENDIANESS: + schema: + const: little + Zicntr: + presence: mandatory + Ziccif: + presence: mandatory + version: "= 1.0" + note: | + Ziccif is a profile-defined extension introduced with RVA20. + The fetch atomicity requirement facilitates runtime patching + of aligned instructions. + Ziccrse: + presence: mandatory + version: "= 1.0" + note: Ziccrse is a profile-defined extension introduced with RVA20. + Ziccamoa: + presence: mandatory + version: "= 1.0" + note: Ziccamoa is a profile-defined extension introduced with RVA20. + Za128rs: + presence: mandatory + version: "= 1.0" + note: | + Za128rs is a profile-defined extension introduced with RVA20. + The minimum reservation set size is effectively determined by the + size of atomic accesses in the `A` extension. + Zicclsm: + presence: mandatory + version: "= 1.0" + note: | + Zicclsm is a profile-defined extension introduced with RVA20. + This requires misaligned support for all regular load and store + instructions (including scalar and vector) but not AMOs or other + specialized forms of memory access. Even though mandated, misaligned + loads and stores might execute extremely slowly. Standard software + distributions should assume their existence only for correctness, not + for performance. +extra_notes: +- presence: optional + text: | + The rationale to not make Q an optional extension is that + quad-precision floating-point is unlikely to be implemented in + hardware, and so we do not require or expect A-profile software to + expend effort optimizing use of Q instructions in case they are + present. +- presence: optional + text: | + Zifencei is not classed as a supported option in the user-mode + profile because it is not sufficient by itself to produce the desired + effect in a multiprogrammed multiprocessor environment without OS + support, and so the instruction cache flush should always be performed + using an OS call rather than using the `fence.i` instruction. + `fence.i` semantics can be expensive to implement for some hardware + memory hierarchy designs, and so alternative non-standard + instruction-cache coherence mechanisms can be used behind the OS + abstraction. A separate extension is being developed for more general + and efficient instruction cache coherence. +- presence: optional + text: | + The execution environment must provide a means to synchronize writes to + instruction memory with instruction fetches, the implementation of which + likely relies on the Zifencei extension.
+ For example, RISC-V Linux supplies the `__riscv_flush_icache` system call and + a corresponding vDSO call. +recommendations: +- text: | + Implementations are strongly recommended to raise illegal-instruction + exceptions on attempts to execute unimplemented opcodes. \ No newline at end of file diff --git a/arch/profile/RVA22S64.yaml b/arch/profile/RVA22S64.yaml new file mode 100644 index 000000000..077077617 --- /dev/null +++ b/arch/profile/RVA22S64.yaml @@ -0,0 +1,104 @@ +$schema: profile_schema.json# +kind: profile +name: RVA22S64 +marketing_name: RVA22S64 +mode: S +base: 64 +release: { $ref: profile_release/RVA22.yaml# } +introduction: | + The RVA22S64 profile specifies the ISA features available to a + supervisor-mode execution environment in 64-bit applications + processors. RVA22S64 is based on privileged architecture version + 1.12. +extensions: + $inherits: "profile/RVA20S64.yaml#/extensions" + S: + presence: mandatory + version: "= 1.12" + Sscounterenw: + presence: mandatory + version: "= 1.0" + note: | + Sscounterenw is a new extension name introduced with RVA22. + Svpbmt: + presence: mandatory + version: "~> 1.0" + Svinval: + presence: mandatory + version: "~> 1.0" + Ssstateen: + presence: mandatory + version: "~> 1.0" + when: + implemented: H + note: | + Ssstateen is a new extension name introduced with RVA22. + Shvstvala: + presence: mandatory + version: "~> 1.0" + when: + implemented: H + note: | + Shvstvala is a new extension name introduced with RVA22. + Shtvala: + presence: mandatory + version: "~> 1.0" + when: + implemented: H + note: | + Shtvala is a new extension name introduced with RVA22. + Shvstvecd: + presence: mandatory + version: "~> 1.0" + when: + implemented: H + note: | + Shvstvecd is a new extension name introduced with RVA22. + Shgatpa: + presence: mandatory + version: "~> 1.0" + when: + implemented: H + note: | + Shgatpa is a new extension name introduced with RVA22. + Sv57: + presence: optional + version: "~> 1.12" + Svnapot: + presence: optional + version: "~> 1.0" + note: | + It is expected that Svnapot will be mandatory in the next + profile release. + Sstc: + presence: optional + version: "~> 1.0" + note: | + Sstc was not made mandatory in RVA22S64 as it is a more + disruptive change affecting system-level architecture, and will take + longer for implementations to adopt. It is expected to be made + mandatory in the next profile release. + Sscofpmf: + presence: optional + version: "~> 1.0" + note: | + Platforms may choose to mandate the presence of Sscofpmf. + Zkr: + presence: optional + version: "~> 1.0" + note: | + Technically, Zk is also a privileged-mode option capturing that + Zkr, Zkn, and Zkt are all implemented. However, the Zk rollup is less + descriptive than specifying the individual extensions explicitly. + H: + presence: optional + version: "~> 1.0" + note: | + The following extensions become mandatory when H is implemented: + + * Ssstateen + * Shcounterenw + * Shvstvala + * Shtvala + * Shvstvecd + * Shgatpa \ No newline at end of file diff --git a/arch/profile/RVA22U64.yaml b/arch/profile/RVA22U64.yaml new file mode 100644 index 000000000..1129b4efc --- /dev/null +++ b/arch/profile/RVA22U64.yaml @@ -0,0 +1,126 @@ +$schema: profile_schema.json# +kind: profile +name: RVA22U64 +marketing_name: RVA22U64 +mode: Unpriv +base: 64 +release: { $ref: profile_release/RVA22.yaml# } +introduction: | + The RVA22U64 profile specifies the ISA features available to user-mode + execution environments in 64-bit applications processors.
This is the + most important profile within application processors in + terms of the amount of software that targets this profile. +extensions: + $inherits: "profile/RVA20U64.yaml#/extensions" + Zihpm: + presence: mandatory + version: "= 2.0" + Zihintpause: + presence: mandatory + version: "= 2.0" + note: | + While the `pause` instruction is a HINT and can be implemented as a + NOP and hence trivially supported by hardware implementers, its + inclusion in the mandatory extension list signifies that software + should use the instruction whenever it would make sense and that + implementors are expected to exploit this information to optimize + hardware execution. + Zba: + presence: mandatory + version: "~> 1.0" + Zbb: + presence: mandatory + version: "~> 1.0" + Zbs: + presence: mandatory + version: "~> 1.0" + Zic64b: + presence: mandatory + version: "= 1.0" + note: | + This is a new extension name for this feature. While the general + RISC-V specifications are agnostic to cache block size, selecting a + common cache block size simplifies the specification and use of the + following cache-block extensions within the application processor + profile. Software does not have to query a discovery mechanism and/or + provide dynamic dispatch to the appropriate code. We choose 64 bytes + as it is effectively an industry standard. Implementations may use + longer cache blocks to reduce tag cost provided they use 64-byte + sub-blocks to remain compatible. Implementations may use shorter cache + blocks provided they sequence cache operations across the multiple + cache blocks comprising a 64-byte block to remain compatible. + Zicbom: + presence: mandatory + version: "~> 1.0" + Zicbop: + presence: mandatory + version: "~> 1.0" + note: | + As with other HINTS, the inclusion of prefetches in the + mandatory set of extensions indicates that software should generate + these instructions where they are expected to be useful, and hardware + is expected to exploit that information. + Zicboz: + presence: mandatory + version: "~> 1.0" + Zfhmin: + presence: mandatory + version: "~> 1.0" + note: | + Zfhmin is a small extension that adds support to load/store and convert + IEEE 754 half-precision numbers to and from the IEEE 754 single-precision + format. The hardware cost for this extension is low, and mandating the + extension avoids adding an option to the profile. + Zkt: + presence: mandatory + version: "~> 1.0" + note: | + Zkt requires a certain subset of integer instructions execute + with data-independent latency. Mandating this feature enables + portable libraries for safe basic cryptographic operations. It is + expected that application processors will naturally have this property + and so implementation cost is low, if not zero, in most systems that + would support RVA22. + Zfh: + presence: optional + version: "~> 1.0" + note: A future profile might mandate V. + V: + presence: optional + version: "~> 1.0" + note: | + The smaller vector extensions (Zve32f, Zve32x, Zve64d, Zve64f, + Zve64x) are not provided as separately supported profile options. The + full V extension is specified as the only supported profile option. + + A future profile might mandate V. + Zkn: + presence: optional + version: "~> 1.0" + Zks: + presence: optional + version: "~> 1.0" +extra_notes: +- presence: optional + text: | + The scalar crypto extensions are expected to be superseded by + vector crypto standards in future profiles, and the scalar extensions + may be removed as supported options once vector crypto is present.
+- presence: optional + text: | + The smaller component scalar crypto extensions (Zbc, Zbkb, Zbkc, + Zbkx, Zknd, Zkne, Zknh, Zksed, Zksh) are not provided as separate + options in the profile. Profile implementers should provide all of + the instructions in a given algorithm suite as part of the Zkn or Zks + supported options. +- presence: optional + text: | + Access to the entropy source (Zkr) in a system is usually + carefully controlled. While the design supports unprivileged access + to the entropy source, this is unlikely to be commonly used in an + application processor, and so Zkr was not added as a profile option. + This also means the roll-up Zk was not added as a profile option. +- presence: optional + text: | + The Zfinx, Zdinx, Zhinx, Zhinxmin extensions are incompatible + with the profile mandates to support the F and D extensions. \ No newline at end of file diff --git a/arch/profile/RVI20U32.yaml b/arch/profile/RVI20U32.yaml new file mode 100644 index 000000000..f0e280148 --- /dev/null +++ b/arch/profile/RVI20U32.yaml @@ -0,0 +1,68 @@ +$schema: profile_schema.json# +kind: profile +name: RVI20U32 +marketing_name: RVI20U32 +mode: Unpriv +base: 32 +release: { $ref: profile_release/RVI20.yaml# } +introduction: | + This profile specifies the ISA features available to generic unprivileged + execution environments. +extensions: + I: + presence: mandatory + version: "~> 2.1" + note: | + RVI is the mandatory base ISA for RVA, and is little-endian. + + As per the unprivileged architecture specification, the `ecall` + instruction causes a requested trap to the execution environment. + + Misaligned loads and stores might not be supported. + + The `fence.tso` instruction is mandatory. + + NOTE: The `fence.tso` instruction was incorrectly described as + optional in the 2019 ratified specifications. However, `fence.tso` is + encoded within the standard `fence` encoding such that implementations + must treat it as a simple global fence if they do not natively support + TSO-ordering optimizations. As software can always assume without any + penalty that `fence.tso` is being exploited by a hardware + implementation, there is no advantage to making the instruction a + profile option. Later versions of the unprivileged ISA specifications + correctly indicate that `fence.tso` is mandatory. + A: + presence: optional + version: "= 2.1" + C: + presence: optional + version: "= 2.2" + D: + presence: optional + version: "= 2.2" + note: | + NOTE: The rationale to not include Q as a profile option is that + quad-precision floating-point is unlikely to be implemented in + hardware, and so we do not require or expect software to expend effort + optimizing use of Q instructions in case they are present. + F: + presence: optional + version: "= 2.2" + M: + presence: optional + version: "= 2.0" + Zicntr: + presence: optional + version: "= 2.0" + Zihpm: + presence: optional + version: "= 2.0" + note: | + The number of counters is platform-specific. + Zifencei: + presence: optional + version: "= 2.0" +recommendations: +- text: | + Implementations are strongly recommended to raise illegal-instruction + exceptions on attempts to execute unimplemented opcodes. 
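+# Editorial sketch (assumption, not schema-normative): the quoted version requirements used
+# throughout these profile files appear to follow RubyGems-style requirement operators, e.g.:
+#   version: "= 2.0"    # exactly version 2.0
+#   version: "~> 2.1"   # 2.1 or any later 2.x release (pessimistic / compatible match)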
\ No newline at end of file diff --git a/arch/profile/RVI20U64.yaml b/arch/profile/RVI20U64.yaml new file mode 100644 index 000000000..5b91ecb2a --- /dev/null +++ b/arch/profile/RVI20U64.yaml @@ -0,0 +1,6 @@ +$schema: profile_schema.json# +kind: profile +name: RVI20U64 +$inherits: "profile/RVI20U32.yaml#" +base: 64 +marketing_name: RVI20U64 \ No newline at end of file diff --git a/arch/profile_class/MockProfileClass.yaml b/arch/profile_class/MockProfileClass.yaml index d7f2af6e7..fa9f7b428 100644 --- a/arch/profile_class/MockProfileClass.yaml +++ b/arch/profile_class/MockProfileClass.yaml @@ -1,15 +1,17 @@ -MockProfileClass: - marketing_name: Mock Profile Class - introduction: Here's the Mock Profile Class introduction. - description: | - This is the Mock Profile Class description. - It can be longer than the introduction since it gets its own sub-heading. - naming_scheme: | - Here's the Mock Profile Class naming scheme. - company: - name: RISC-V International - url: https://riscv.org - doc_license: - name: Creative Commons Attribution 4.0 International License - url: https://creativecommons.org/licenses/by/4.0/ - text_url: https://creativecommons.org/licenses/by/4.0/legalcode.txt \ No newline at end of file +$schema: profile_class_schema.json# +kind: profile class +name: MockProfileClass +marketing_name: Mock Profile Class +introduction: Here's the Mock Profile Class introduction. +description: | + This is the Mock Profile Class description. + It can be longer than the introduction since it gets its own sub-heading. +naming_scheme: | + Here's the Mock Profile Class naming scheme. +company: + name: RISC-V International + url: https://riscv.org +doc_license: + name: Creative Commons Attribution 4.0 International License + url: https://creativecommons.org/licenses/by/4.0/ + text_url: https://creativecommons.org/licenses/by/4.0/legalcode.txt \ No newline at end of file diff --git a/arch/profile_class/RVA.yaml b/arch/profile_class/RVA.yaml index db88fc9cc..44a61e8ae 100644 --- a/arch/profile_class/RVA.yaml +++ b/arch/profile_class/RVA.yaml @@ -1,145 +1,149 @@ -RVA: - marketing_name: RVA - introduction: | - The RVA profile class targets application processors for markets - requiring a high-degree of binary compatibility between compliant implementations. - description: | - RISC-V was designed to provide a highly modular and extensible - instruction set and includes a large and growing set of standard - extensions, where each standard extension is a bundle of - instruction-set features. This is no different than other industry - ISAs that continue to add new ISA features. Unlike other ISAs, - however, RISC-V has a broad set of contributors and implementers, and - also allows users to add their own custom extensions. For some deep - embedded markets, highly customized processor configurations are - desirable for efficiency, and all software is compiled, ported, and/or - developed in-house by the same organization for that specific - processor configuration. However, for other markets that expect a - substantial fraction of software to be delivered to end-customers in - binary form, compatibility across multiple implementations from - different RISC-V vendors is required. - - The RVIA ISA extension ratification process ensures that all processor - vendors have agreed to the specification of a standard extension if - present. However, by themselves, the ISA extension specifications do - not guarantee that a certain set of standard extensions will be - present in all implementations. 
- - *The primary goal of the RVA profiles is to align processor vendors - targeting binary software markets, so software can rely on the - existence of a certain set of ISA features in a particular generation - of RISC-V implementations.* - - Alignment is not only for compatibility, but also to ensure RISC-V is - competitive in these markets. The binary app markets are also - generally those with the most competitive performance requirements - (e.g., mobile, client, server). RVIA cannot mandate the ISA features - that a RISC-V binary software ecosystem should use, as each ecosystem - will typically select the lowest-common denominator they empirically - observe in the deployed devices in their target markets. But RVIA can - align hardware vendors to support a common set of features in each - generation through the RVA profiles. Without proactive alignment - through RVA profiles, RISC-V will be uncompetitive, as even if a - particular vendor implements a certain feature, if other vendors do - not, then binary distributions will not generally use that feature and - all implementations will suffer. While certain features may be - discoverable, and alternate code provided in case of presence/absence - of a feature, the added cost to support such options is only justified - for certain limited cases, and binary app markets will not support a - wide range of optional features, particularly for the nascent RISC-V - binary app ecosystems. - - To maintain alignment and increase RISC-V competitiveness over time, - the mandatory set of extensions must increase over time in successive - generations of RVA profile. (RVA profiles may eventually have to - deprecate previously mandatory instructions, but that is unlikely in - the near future.) Note that the RISC-V ISA will continue to evolve, - regardless of whether a given software ecosystem settles on a certain - generation of profile as the baseline for their ecosystem for many - years or even decades. There are many existing binary software - ecosystems, which will migrate to RISC-V and evolve at different rates, - and more new ones will doubtless be created over the hopefully long - lifetime of RISC-V. High-performance application processors require - considerable investment, and no single binary app ecosystem can - justify the development costs of these processors, especially for - RISC-V in its early stage of adoption. - - While the heart of the profile is the set of mandatory extensions, - there are several kinds of optional extension that serve important - roles in the profile. - - The first kind are _localized_ _options_, whose presence or use - necessarily differs along geo-political and/or jurisdictional - boundaries, with crypto being the obvious example. These will always - be optional. At least for crypto, discovery has been found to be - perfectly acceptable to handle this optionality on other - architectures, as the use of the extensions is well contained in - certain libraries. - - The second kind of optional extension is a _development_ _option_, - which represents a new ISA extension in an early part of its lifecycle - but which is intended to become mandatory in a later generation of the - RVA profile. Processor vendors and software toolchain providers will - have varying development schedules, and providing an optional phase in - a new extension's lifecycle provides some flexibility while - maintaining overall alignment, and is particularly appropriate when - hardware or software development for the extension is complex. 
- Denoting an extension as a _development_ _option_ signals to the - community that development should be prioritized for such extensions - as they will become mandatory. - - The third kind of optional extension are _expansion_ _options_, which - are those that may have a large implementation cost but are not always - needed in a particular platform, and which can be readily handled by - discovery. These are also intended to remain available as expansion - options in future versions of the profile. Several supervisor-mode - extensions fall into this category, e.g., Sv57, which has a notable - PPA impact over Sv48 and is not needed on smaller platforms. Some - unprivileged extensions that may fall into this category are possible - future matrix extensions. These have large implementation costs, and - use of matrix instructions can be readily supported with discovery and - alternate math libraries. - - The fourth kind of optional extensions are _transitory_ _options_, - where it is not clear if the extension will change to a mandatory, - localized, or expansion option, or be possibly dropped over time. - Cryptography provides some examples where earlier cyphers have been - broken and are now deprecated. RVIA used this mechanism to enable - scalar crypto until vector crypto was ready. Software security - features may also be in this category, with examples of deprecated - security features occuring in other architectures. As another - example, the recent avalanche of new numeric datatypes for AI/ML may - eventually subside with a few survivors actually being used longer - term. Denoting an option as transitory signals to the community that - this extension may be removed in a future profile, though the time - scale may span many years. - - Except for the localized options, it could be argued that other three - kinds of option could be left out of profiles. Binary distributions - of applications willing to invest in discovery can use an optional - extension, and customers compiling their own applications can take - advantage of the feature on a particular implementation, even when - that system is mostly running binary distributions that ignore the new - extension. However, there is value in providing guidance to align - hardware vendors and software developers around what extensions are - worth implementing and worth discovering, by designating only a few - important features as profile options and limiting their granularity. - naming_scheme: | - The profile class name is RVA (RISC-V Apps processor). - A profile release name is an integer (currently 2 digits, could grow in the future). 
- A full profile name is comprised of, in order: +# yaml-language-server: $schema=../../schemas/profile_class_schema.json - * Prefix *RVA* for RISC-V Applications - * Profile release - * Privilege mode: - ** *U* Unprivileged (available to any privilege mode, *U* is *not* User-mode) - ** *S* Supervisor mode (note that Hypervisor support is treated as an option) - ** *M* Machine mode - * A base ISA XLEN specifier (*32*, *64*) - company: - name: RISC-V International - url: https://riscv.org - doc_license: - name: Creative Commons Attribution 4.0 International License - url: https://creativecommons.org/licenses/by/4.0/ - text_url: https://creativecommons.org/licenses/by/4.0/legalcode.txt \ No newline at end of file +$schema: profile_class_schema.json# +kind: profile class +name: RVA +marketing_name: RVA +introduction: | + The RVA profile class targets application processors for markets + requiring a high-degree of binary compatibility between compliant implementations. +description: | + RISC-V was designed to provide a highly modular and extensible + instruction set and includes a large and growing set of standard + extensions, where each standard extension is a bundle of + instruction-set features. This is no different than other industry + ISAs that continue to add new ISA features. Unlike other ISAs, + however, RISC-V has a broad set of contributors and implementers, and + also allows users to add their own custom extensions. For some deep + embedded markets, highly customized processor configurations are + desirable for efficiency, and all software is compiled, ported, and/or + developed in-house by the same organization for that specific + processor configuration. However, for other markets that expect a + substantial fraction of software to be delivered to end-customers in + binary form, compatibility across multiple implementations from + different RISC-V vendors is required. + + The RVIA ISA extension ratification process ensures that all processor + vendors have agreed to the specification of a standard extension if + present. However, by themselves, the ISA extension specifications do + not guarantee that a certain set of standard extensions will be + present in all implementations. + + *The primary goal of the RVA profiles is to align processor vendors + targeting binary software markets, so software can rely on the + existence of a certain set of ISA features in a particular generation + of RISC-V implementations.* + + Alignment is not only for compatibility, but also to ensure RISC-V is + competitive in these markets. The binary app markets are also + generally those with the most competitive performance requirements + (e.g., mobile, client, server). RVIA cannot mandate the ISA features + that a RISC-V binary software ecosystem should use, as each ecosystem + will typically select the lowest-common denominator they empirically + observe in the deployed devices in their target markets. But RVIA can + align hardware vendors to support a common set of features in each + generation through the RVA profiles. Without proactive alignment + through RVA profiles, RISC-V will be uncompetitive, as even if a + particular vendor implements a certain feature, if other vendors do + not, then binary distributions will not generally use that feature and + all implementations will suffer. 
While certain features may be + discoverable, and alternate code provided in case of presence/absence + of a feature, the added cost to support such options is only justified + for certain limited cases, and binary app markets will not support a + wide range of optional features, particularly for the nascent RISC-V + binary app ecosystems. + + To maintain alignment and increase RISC-V competitiveness over time, + the mandatory set of extensions must increase over time in successive + generations of RVA profile. (RVA profiles may eventually have to + deprecate previously mandatory instructions, but that is unlikely in + the near future.) Note that the RISC-V ISA will continue to evolve, + regardless of whether a given software ecosystem settles on a certain + generation of profile as the baseline for their ecosystem for many + years or even decades. There are many existing binary software + ecosystems, which will migrate to RISC-V and evolve at different rates, + and more new ones will doubtless be created over the hopefully long + lifetime of RISC-V. High-performance application processors require + considerable investment, and no single binary app ecosystem can + justify the development costs of these processors, especially for + RISC-V in its early stage of adoption. + + While the heart of the profile is the set of mandatory extensions, + there are several kinds of optional extension that serve important + roles in the profile. + + The first kind are _localized_ _options_, whose presence or use + necessarily differs along geo-political and/or jurisdictional + boundaries, with crypto being the obvious example. These will always + be optional. At least for crypto, discovery has been found to be + perfectly acceptable to handle this optionality on other + architectures, as the use of the extensions is well contained in + certain libraries. + + The second kind of optional extension is a _development_ _option_, + which represents a new ISA extension in an early part of its lifecycle + but which is intended to become mandatory in a later generation of the + RVA profile. Processor vendors and software toolchain providers will + have varying development schedules, and providing an optional phase in + a new extension's lifecycle provides some flexibility while + maintaining overall alignment, and is particularly appropriate when + hardware or software development for the extension is complex. + Denoting an extension as a _development_ _option_ signals to the + community that development should be prioritized for such extensions + as they will become mandatory. + + The third kind of optional extension are _expansion_ _options_, which + are those that may have a large implementation cost but are not always + needed in a particular platform, and which can be readily handled by + discovery. These are also intended to remain available as expansion + options in future versions of the profile. Several supervisor-mode + extensions fall into this category, e.g., Sv57, which has a notable + PPA impact over Sv48 and is not needed on smaller platforms. Some + unprivileged extensions that may fall into this category are possible + future matrix extensions. These have large implementation costs, and + use of matrix instructions can be readily supported with discovery and + alternate math libraries. + + The fourth kind of optional extensions are _transitory_ _options_, + where it is not clear if the extension will change to a mandatory, + localized, or expansion option, or be possibly dropped over time. 
+ Cryptography provides some examples where earlier cyphers have been + broken and are now deprecated. RVIA used this mechanism to enable + scalar crypto until vector crypto was ready. Software security + features may also be in this category, with examples of deprecated + security features occuring in other architectures. As another + example, the recent avalanche of new numeric datatypes for AI/ML may + eventually subside with a few survivors actually being used longer + term. Denoting an option as transitory signals to the community that + this extension may be removed in a future profile, though the time + scale may span many years. + + Except for the localized options, it could be argued that other three + kinds of option could be left out of profiles. Binary distributions + of applications willing to invest in discovery can use an optional + extension, and customers compiling their own applications can take + advantage of the feature on a particular implementation, even when + that system is mostly running binary distributions that ignore the new + extension. However, there is value in providing guidance to align + hardware vendors and software developers around what extensions are + worth implementing and worth discovering, by designating only a few + important features as profile options and limiting their granularity. +naming_scheme: | + The profile class name is RVA (RISC-V Apps processor). + A profile release name is an integer (currently 2 digits, could grow in the future). + A full profile name is comprised of, in order: + + * Prefix *RVA* for RISC-V Applications + * Profile release + * Privilege mode: + ** *U* Unprivileged (available to any privilege mode, *U* is *not* User-mode) + ** *S* Supervisor mode (note that Hypervisor support is treated as an option) + ** *M* Machine mode + * A base ISA XLEN specifier (*32*, *64*) +company: + name: RISC-V International + url: https://riscv.org +doc_license: + name: Creative Commons Attribution 4.0 International License + url: https://creativecommons.org/licenses/by/4.0/ + text_url: https://creativecommons.org/licenses/by/4.0/legalcode.txt diff --git a/arch/profile_class/RVB.yaml b/arch/profile_class/RVB.yaml index f9529ef1d..d2e9726c7 100644 --- a/arch/profile_class/RVB.yaml +++ b/arch/profile_class/RVB.yaml @@ -1,50 +1,52 @@ -RVB: - marketing_name: RVB - introduction: | - The RVB profile class targets application processors for markets - running Bespoke (AKA custom, AKA Yocto) Linux Operating Systems - in embedded applications. - description: | - The RVB profile class is intended to be used for 64-bit application - processors running rich OS stacks. Only user-mode and - supervisor-mode profiles are specified in this class. +$schema: profile_class_schema.json# +kind: profile class +name: RVB +marketing_name: RVB +introduction: | + The RVB profile class targets application processors for markets + running Bespoke (AKA custom, AKA Yocto) Linux Operating Systems + in embedded applications. +description: | + The RVB profile class is intended to be used for 64-bit application + processors running rich OS stacks. Only user-mode and + supervisor-mode profiles are specified in this class. - NOTE: There is no machine-mode profile currently defined for RVB. - A machine-mode profile for application processors would only be used in specifying platforms for - portable machine-mode software. 
Given the relatively low volume of - portable M-mode software in this domain, the wide variety of potential - M-mode code, and the very specific needs of each type of M-mode - software, we are not specifying individual M-mode ISA requirements in - the RVB profiles. + NOTE: There is no machine-mode profile currently defined for RVB. + A machine-mode profile for application processors would only be used in specifying platforms for + portable machine-mode software. Given the relatively low volume of + portable M-mode software in this domain, the wide variety of potential + M-mode code, and the very specific needs of each type of M-mode + software, we are not specifying individual M-mode ISA requirements in + the RVB profiles. - NOTE: Only XLEN=64 application processor profiles are currently - defined. It would be possible to also define very similar XLEN=32 - variants. - naming_scheme: | - The profile class name is RVB (RISC-V Bespoke processor). - A profile release name is a integer (currently 2 digits, could grow in the future). - A full profile name is comprised of, in order: + NOTE: Only XLEN=64 application processor profiles are currently + defined. It would be possible to also define very similar XLEN=32 + variants. +naming_scheme: | + The profile class name is RVB (RISC-V Bespoke processor). + A profile release name is a integer (currently 2 digits, could grow in the future). + A full profile name is comprised of, in order: - * Prefix *RVB* for RISC-V Bespoke - * Profile release - * Privilege mode: - ** *U* Unprivileged (available to any privilege mode, *U* is *not* User-mode) - ** *S* Supervisor mode (note that Hypervisor support is treated as an option) - ** *M* Machine mode - * A base ISA XLEN specifier (*32*, *64*) + * Prefix *RVB* for RISC-V Bespoke + * Profile release + * Privilege mode: + ** *U* Unprivileged (available to any privilege mode, *U* is *not* User-mode) + ** *S* Supervisor mode (note that Hypervisor support is treated as an option) + ** *M* Machine mode + * A base ISA XLEN specifier (*32*, *64*) - The initial profiles based on specifications ratified in 2024 are: + The initial profiles based on specifications ratified in 2024 are: - * RVB23U64, RVB23S64 64-bit application-processor profiles + * RVB23U64, RVB23S64 64-bit application-processor profiles - NOTE: Profile names are embeddable into RISC-V ISA naming strings. - This implies that there will be no standard ISA extension with a name - that matches the profile naming convention. This allows tools that - process the RISC-V ISA naming string to parse and/or process a combined string. - company: - name: RISC-V International - url: https://riscv.org - doc_license: - name: Creative Commons Attribution 4.0 International License - url: https://creativecommons.org/licenses/by/4.0/ - text_url: https://creativecommons.org/licenses/by/4.0/legalcode.txt \ No newline at end of file + NOTE: Profile names are embeddable into RISC-V ISA naming strings. + This implies that there will be no standard ISA extension with a name + that matches the profile naming convention. This allows tools that + process the RISC-V ISA naming string to parse and/or process a combined string. 
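
As an illustration of the naming scheme described above (the RVA and RVI classes follow the same pattern): the names are regular enough to be parsed mechanically. The sketch below is not part of the patch; the regular expression and `parse_profile_name` are hypothetical and assume only the three profile classes and the U/S/M mode letters named in these files.

# Decompose a profile name such as "RVB23S64" into its components.
PROFILE_NAME_RE = /\A(?<klass>RV[ABI])(?<release>\d+)(?<mode>[USM])(?<base>32|64)\z/

def parse_profile_name(name)
  m = PROFILE_NAME_RE.match(name)
  raise ArgumentError, "not a recognized profile name: #{name}" if m.nil?

  { profile_class: m[:klass], release: Integer(m[:release]), mode: m[:mode], base: Integer(m[:base]) }
end

parse_profile_name("RVB23S64")
# => {:profile_class=>"RVB", :release=>23, :mode=>"S", :base=>64}
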
+company: + name: RISC-V International + url: https://riscv.org +doc_license: + name: Creative Commons Attribution 4.0 International License + url: https://creativecommons.org/licenses/by/4.0/ + text_url: https://creativecommons.org/licenses/by/4.0/legalcode.txt \ No newline at end of file diff --git a/arch/profile_class/RVI.yaml b/arch/profile_class/RVI.yaml index ed900ef3d..ef6179b69 100644 --- a/arch/profile_class/RVI.yaml +++ b/arch/profile_class/RVI.yaml @@ -1,28 +1,30 @@ -RVI: - marketing_name: RVI - introduction: The RVI profile class documents the initial set of unprivileged instructions. - description: | - The RVI profile class provides a generic target for software toolchains - and represent the minimum level of compatibility with RISC-V ratified standards. +$schema: profile_class_schema.json# +kind: profile class +name: RVI +marketing_name: RVI +introduction: The RVI profile class documents the initial set of unprivileged instructions. +description: | + The RVI profile class provides a generic target for software toolchains + and represent the minimum level of compatibility with RISC-V ratified standards. - NOTE: Profiles in this class are designated as _unprivileged_ profiles as opposed to - _user_-_mode_ profiles. Code using this profile class can run in any - privilege mode, and so requested and fatal traps may be horizontal - traps into an execution environment running in the same privilege mode. - naming_scheme: | - The profile class name is RVI (RISC-V base Integer instructions). - A profile release name is an integer (currently 2 digits, could grow in the future). - A full profile name is comprised of, in order: + NOTE: Profiles in this class are designated as _unprivileged_ profiles as opposed to + _user_-_mode_ profiles. Code using this profile class can run in any + privilege mode, and so requested and fatal traps may be horizontal + traps into an execution environment running in the same privilege mode. +naming_scheme: | + The profile class name is RVI (RISC-V base Integer instructions). + A profile release name is an integer (currently 2 digits, could grow in the future). 
+ A full profile name is comprised of, in order: - * Prefix *RVI* for RISC-V Integer - * Profile release - * Privilege mode: - ** *U* Unprivileged (available to any privilege mode, *U* is *not* User-mode) - * A base ISA XLEN specifier (*32*, *64*) - company: - name: RISC-V International - url: https://riscv.org - doc_license: - name: Creative Commons Attribution 4.0 International License - url: https://creativecommons.org/licenses/by/4.0/ - text_url: https://creativecommons.org/licenses/by/4.0/legalcode.txt \ No newline at end of file + * Prefix *RVI* for RISC-V Integer + * Profile release + * Privilege mode: + ** *U* Unprivileged (available to any privilege mode, *U* is *not* User-mode) + * A base ISA XLEN specifier (*32*, *64*) +company: + name: RISC-V International + url: https://riscv.org +doc_license: + name: Creative Commons Attribution 4.0 International License + url: https://creativecommons.org/licenses/by/4.0/ + text_url: https://creativecommons.org/licenses/by/4.0/legalcode.txt \ No newline at end of file diff --git a/arch/profile_release/MockProfileRelease.yaml b/arch/profile_release/MockProfileRelease.yaml index 562af1c53..1580f87c6 100644 --- a/arch/profile_release/MockProfileRelease.yaml +++ b/arch/profile_release/MockProfileRelease.yaml @@ -19,82 +19,5 @@ MockProfileRelease: email: jane.doe@gmail.com company: Universal Imports profiles: - MP-U-64: - marketing_name: MockProfile 64-bit Unpriv - mode: Unpriv - base: 64 - release: MockProfileRelease - extensions: - A: - presence: optional - version: "= 2.1" - I: - presence: mandatory - version: "~> 2.1" - Svade: - presence: mandatory - note: Adding this to get coverage when extension "conflicts" with another (Svadu in this case). - MP-S-64: - marketing_name: MockProfile 64-bit S-mode - description: This is the Mock Profile Supervisor Mode description. - mode: S - base: 64 - release: MockProfileRelease - contributors: - - name: Micky Mouse - email: micky@disney.com - company: Disney - extensions: - $inherits: "#/MockProfileRelease/profiles/MP-U-64/extensions" - A: - presence: mandatory - note: This should be listed as mandatory in MP-S-64 and optional in MP-U-64. - S: - presence: - optional: localized - version: "= 1.12" - Zifencei: - presence: - optional: development - version: "= 2.0" - note: - Zihpm: - presence: - optional: expansion - version: "= 2.0" - note: Made this a expansion option - Sv48: - presence: - optional: transitory - version: "= 1.11" - note: Made this a transitory option - extra_notes: - - presence: mandatory - text: | - Here's the first extra note for the mandatory extensions section. - This note is multiple lines. - - presence: optional - text: | - Here's the first extra note for the optional extensions section. - In this case, we don't differentiate between optional types. - This note is multiple lines. - - presence: - optional: localized - text: Here's the first extra note for the localized optional extensions section. - - presence: - optional: localized - text: Here's the second extra note for the localized optional extensions section. - - presence: - optional: development - text: Here's the first extra note for the development optional extensions section. - - presence: - optional: expansion - text: Here's the first extra note for the expansion optional extensions section. - - presence: - optional: transitory - text: Here's the first extra note for the transitory optional extensions section. 
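
A note on the `presence` field used throughout these profile entries: it is written either as a bare string (`presence: mandatory`) or as a one-key mapping (`presence: { optional: localized }`), as in the extra_notes just above. A minimal, hypothetical normalization of the two shapes might look like the sketch below; `normalize_presence` is illustrative and not code from this repository.

# Accept both shapes of `presence` and return a uniform hash.
def normalize_presence(presence)
  case presence
  when String
    { presence: presence, optional_kind: nil }
  when Hash
    { presence: "optional", optional_kind: presence["optional"] }
  else
    raise ArgumentError, "unexpected presence value: #{presence.inspect}"
  end
end

normalize_presence("mandatory")                   # => {:presence=>"mandatory", :optional_kind=>nil}
normalize_presence({ "optional" => "localized" }) # => {:presence=>"optional", :optional_kind=>"localized"}
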
- recommendations: - - text: | - Implementations are strongly recommended to raise illegal-instruction - exceptions on attempts to execute unimplemented opcodes. - - text: Micky should give Pluto an extra treat \ No newline at end of file + - { $ref: profile/MP-U-64.yaml# } + - { $ref: profile/MP-S-64.yaml# } diff --git a/arch/profile_release/RVA20.yaml b/arch/profile_release/RVA20.yaml index 6be79e41d..649e0e594 100644 --- a/arch/profile_release/RVA20.yaml +++ b/arch/profile_release/RVA20.yaml @@ -32,159 +32,5 @@ RVA20: email: krste@sifive.com company: SiFive profiles: - RVA20U64: - marketing_name: RVA20U64 - mode: Unpriv - base: 64 - release: RVA20 - introduction: | - The RVA20U64 profile specifies the ISA features available to user-mode - execution environments in 64-bit applications processors. This is the - most important profile within application processors in - terms of the amount of software that targets this profile. - extensions: - $inherits: "profile_release/RVI20.yaml#/RVI20/profiles/RVI20U64/extensions" - $remove: Zifencei # Not allowed as an option for Unpriv ISA (only available in Priv ISA). - A: - presence: mandatory - C: - presence: mandatory - D: - presence: mandatory - F: - presence: mandatory - M: - presence: mandatory - U: - presence: mandatory - version: "~> 2.0" - param_constraints: - U_MODE_ENDIANESS: - schema: - const: little - Zicntr: - presence: mandatory - Ziccif: - presence: mandatory - version: "= 1.0" - note: | - Ziccif is a profile-defined extension introduced with RVA20. - The fetch atomicity requirement facilitates runtime patching - of aligned instructions. - Ziccrse: - presence: mandatory - version: "= 1.0" - note: Ziccrse is a profile-defined extension introduced with RVA20. - Ziccamoa: - presence: mandatory - version: "= 1.0" - note: Ziccamo is a profile-defined extension introduced with RVA20. - Za128rs: - presence: mandatory - version: "= 1.0" - note: | - Za128rs is a profile-defined extension introduced with RVA20. - The minimum reservation set size is effectively determined by the - size of atomic accesses in the `A` extension. - Zicclsm: - presence: mandatory - version: "= 1.0" - note: | - Zicclsm is a profile-defined extension introduced with RVA20. - This requires misaligned support for all regular load and store - instructions (including scalar and vector) but not AMOs or other - specialized forms of memory access. Even though mandated, misaligned - loads and stores might execute extremely slowly. Standard software - distributions should assume their existence only for correctness, not - for performance. - extra_notes: - - presence: optional - text: | - The rationale to not make Q an optional extension is that - quad-precision floating-point is unlikely to be implemented in - hardware, and so we do not require or expect A-profile software to - expend effort optimizing use of Q instructions in case they are - present. - - presence: optional - text: | - Zifencei is not classed as a supported option in the user-mode - profile because it is not sufficient by itself to produce the desired - effect in a multiprogrammed multiprocessor environment without OS - support, and so the instruction cache flush should always be performed - using an OS call rather than using the `fence.i` instruction. - `fence.i` semantics can be expensive to implement for some hardware - memory hierarchy designs, and so alternative non-standard - instruction-cache coherence mechanisms can be used behind the OS - abstraction. 
A separate extension is being developed for more general - and efficient instruction cache coherence. - - presence: optional - text: | - The execution environment must provide a means to synchronize writes to - instruction memory with instruction fetches, the implementation of which - likely relies on the Zifencei extension. - For example, RISC-V Linux supplies the `__riscv_flush_icache` system call and - a corresponding vDSO call. - recommendations: - - text: | - Implementations are strongly recommended to raise illegal-instruction - exceptions on attempts to execute unimplemented opcodes. - RVA20S64: - marketing_name: RVA20S64 - mode: S - base: 64 - release: RVA20 - introduction: | - The RVA20S64 profile specifies the ISA features available to a - supervisor-mode execution environment in 64-bit applications - processors. RVA20S64 is based on privileged architecture version 1.11. - extensions: - S: - presence: mandatory - version: "= 1.11" - Zifencei: - presence: mandatory - version: "= 2.0" - note: | - Zifencei is mandated as it is the only standard way to support - instruction-cache coherence in RVA20 application processors. A new - instruction-cache coherence mechanism is under development which might - be added as an option in the future. - Svbare: - presence: mandatory - version: "= 1.0" - note: | - Svbare is a new extension name introduced with RVA20. - Sv39: - presence: mandatory - version: "= 1.11" - Svade: - presence: mandatory - version: "~> 1.0" - note: | - Svbare is a new extension name introduced with RVA20. - - It is subsequently defined in more detail with the ratification of - `Svadu`. - Ssccptr: - presence: mandatory - version: "= 1.0" - note: | - Ssccptr is a new extension name introduced with RVA20. - Sstvecd: - presence: mandatory - version: "= 1.0" - note: | - Sstvecd is a new extension name introduced with RVA20. - Sstvala: - presence: mandatory - version: "= 1.0" - note: | - Sstvala is a new extension name introduced with RVA20. - Sv48: - presence: optional - version: "= 1.11" - Ssu64xl: - presence: optional - version: "= 1.0" - note: | - Ssu64xl is a new extension name introduced with RVA20. \ No newline at end of file + - { $ref: profile/RVA20U64.yaml# } + - { $ref: profile/RVA20S64.yaml# } diff --git a/arch/profile_release/RVA22.yaml b/arch/profile_release/RVA22.yaml index 327ff31aa..f36c5bbc2 100644 --- a/arch/profile_release/RVA22.yaml +++ b/arch/profile_release/RVA22.yaml @@ -32,229 +32,5 @@ RVA22: email: krste@sifive.com company: SiFive profiles: - RVA22U64: - marketing_name: RVA22U64 - mode: Unpriv - base: 64 - release: RVA22 - introduction: | - The RVA22U64 profile specifies the ISA features available to user-mode - execution environments in 64-bit applications processors. This is the - most important profile within application processors in - terms of the amount of software that targets this profile. - extensions: - $inherits: "profile_release/RVA20.yaml#/RVA20/profiles/RVA20U64/extensions" - Zihpm: - presence: mandatory - version: "= 2.0" - Zihintpause: - presence: mandatory - version: "= 2.0" - note: | - While the `pause` instruction is a HINT can be implemented as a - NOP and hence trivially supported by hardware implementers, its - inclusion in the mandatory extension list signifies that software - should use the instruction whenever it would make sense and that - implementors are expected to exploit this information to optimize - hardware execution. 
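
Stepping back from the individual extension notes for a moment: the profile_release files in this patch now list their profiles as `- { $ref: profile/....yaml# }` entries instead of inlining them. A minimal sketch of expanding such references is shown below; it is illustrative only, and `expand_refs` is a hypothetical helper rather than whatever resolver the repository actually uses.

require "yaml"

# Recursively replace { "$ref" => "path.yaml#" } nodes with the parsed contents
# of the referenced file (paths taken relative to `root`).
def expand_refs(node, root:)
  case node
  when Hash
    if node.keys == ["$ref"]
      expand_refs(YAML.load_file(File.join(root, node["$ref"].delete_suffix("#"))), root: root)
    else
      node.transform_values { |v| expand_refs(v, root: root) }
    end
  when Array
    node.map { |v| expand_refs(v, root: root) }
  else
    node
  end
end

# expand_refs([{ "$ref" => "profile/RVA20U64.yaml#" }], root: "arch")
# # => [<parsed contents of arch/profile/RVA20U64.yaml>]
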
- Zba: - presence: mandatory - version: "~> 1.0" - Zbb: - presence: mandatory - version: "~> 1.0" - Zbs: - presence: mandatory - version: "~> 1.0" - Zic64b: - presence: mandatory - version: "= 1.0" - note: | - This is a new extension name for this feature. While the general - RISC-V specifications are agnostic to cache block size, selecting a - common cache block size simplifies the specification and use of the - following cache-block extensions within the application processor - profile. Software does not have to query a discovery mechanism and/or - provide dynamic dispatch to the appropriate code. We choose 64 bytes - at it is effectively an industry standard. Implementations may use - longer cache blocks to reduce tag cost provided they use 64-byte - sub-blocks to remain compatible. Implementations may use shorter cache - blocks provided they sequence cache operations across the multiple - cache blocks comprising a 64-byte block to remain compatible. - Zicbom: - presence: mandatory - version: "~> 1.0" - Zicbop: - presence: mandatory - version: "~> 1.0" - note: | - As with other HINTS, the inclusion of prefetches in the - mandatory set of extensions indicates that software should generate - these instructions where they are expected to be useful, and hardware - is expected to exploit that information. - Zicboz: - presence: mandatory - version: "~> 1.0" - Zfhmin: - presence: mandatory - version: "~> 1.0" - note: | - Zfhmin is a small extension that adds support to load/store and convert - IEEE 754 half-precision numbers to and from the IEEE 754 single-precision - format. The hardware cost for this extension is low, and mandating the - extension avoids adding an option to the profile. - Zkt: - presence: mandatory - version: "~> 1.0" - note: | - Zkt requires a certain subset of integer instructions execute - with data-independent latency. Mandating this feature enables - portable libraries for safe basic cryptographic operations. It is - expected that application processors will naturally have this property - and so implementation cost is low, if not zero, in most systems that - would support RVA22. - Zfh: - presence: optional - version: "~> 1.0" - note: A future profile might mandate V. - V: - presence: optional - version: "~> 1.0" - note: | - The smaller vector extensions (Zve32f, Zve32x, Zve64d, Zve64f, - Zve64x) are not provided as separately supported profile options. The - full V extension is specified as the only supported profile option. - - A future profile might mandate V. - Zkn: - presence: optional - version: "~> 1.0" - Zks: - presence: optional - version: "~> 1.0" - extra_notes: - - presence: optional - text: | - The scalar crypto extensions are expected to be superseded by - vector crypto standards in future profiles, and the scalar extensions - may be removed as supported options once vector crypto is present. - - presence: optional - text: | - The smaller component scalar crypto extensions (Zbc, Zbkb, Zbkc, - Zbkx, Zknd, Zkne, Zknh, Zksed, Zksh) are not provided as separate - options in the profile. Profile implementers should provide all of - the instructions in a given algorithm suite as part of the Zkn or Zks - supported options. - - presence: optional - text: | - Access to the entropy source (Zkr) in a system is usually - carefully controlled. While the design supports unprivileged access - to the entropy source, this is unlikely to be commonly used in an - application processor, and so Zkr was not added as a profile option. 
- This also means the roll-up Zk was not added as a profile option. - - presence: optional - text: | - The Zfinx, Zdinx, Zhinx, Zhinxmin extensions are incompatible - with the profile mandates to support the F and D extensions. - RVA22S64: - marketing_name: RVA22S64 - mode: S - base: 64 - release: RVA22 - introduction: | - The RVA22S64 profile specifies the ISA features available to a - supervisor-mode execution environment in 64-bit applications - processors. RVA22S64 is based on privileged architecture version - 1.12. - extensions: - $inherits: "profile_release/RVA20.yaml#/RVA20/profiles/RVA20S64/extensions" - S: - presence: mandatory - version: "= 1.12" - Sscounterenw: - presence: mandatory - version: "= 1.0" - note: | - Sstvala is a new extension name introduced with RVA22. - Svpbmt: - presence: mandatory - version: "~> 1.0" - Svinval: - presence: mandatory - version: "~> 1.0" - Ssstateen: - presence: mandatory - version: "~> 1.0" - when: - implemented: H - note: | - Ssstateen is a new extension name introduced with RVA22. - Shvstvala: - presence: mandatory - version: "~> 1.0" - when: - implemented: H - note: | - Shvstvala is a new extension name introduced with RVA22. - Shtvala: - presence: mandatory - version: "~> 1.0" - when: - implemented: H - note: | - Shtvala is a new extension name introduced with RVA22. - Shvstvecd: - presence: mandatory - version: "~> 1.0" - when: - implemented: H - note: | - Shvstvecd is a new extension name introduced with RVA22. - Shgatpa: - presence: mandatory - version: "~> 1.0" - when: - implemented: H - note: | - Shgatpa is a new extension name introduced with RVA22. - Sv57: - presence: optional - version: "~> 1.12" - Svnapot: - presence: optional - version: "~> 1.0" - note: | - It is expected that Svnapot will be mandatory in the next - profile release. - Sstc: - presence: optional - version: "~> 1.0" - note: | - Sstc was not made mandatory in RVA22S64 as it is a more - disruptive change affecting system-level architecture, and will take - longer for implementations to adopt. It is expected to be made - mandatory in the next profile release. - Sscofpmf: - presence: optional - version: "~> 1.0" - note: | - Platforms may choose to mandate the presence of Sscofpmf. - Zkr: - presence: optional - version: "~> 1.0" - note: | - Technically, Zk is also a privileged-mode option capturing that - Zkr, Zkn, and Zkt are all implemented. However, the Zk rollup is less - descriptive than specifying the individual extensions explicitly. - H: - presence: optional - version: "~> 1.0" - note: | - The following extensions become mandatory when H is implemented: - - * Ssstateen - * Shcounterenw - * Shvstvala - * Shtvala - * Shvstvecd - * Shgatpa \ No newline at end of file + - { $ref: profile/RVA22U64.yaml# } + - { $ref: profile/RVA22S64.yaml# } diff --git a/arch/profile_release/RVI20.yaml b/arch/profile_release/RVI20.yaml index 759619cae..535e274c6 100644 --- a/arch/profile_release/RVI20.yaml +++ b/arch/profile_release/RVI20.yaml @@ -17,73 +17,5 @@ RVI20: email: krste@sifive.com company: SiFive profiles: - RVI20U32: - marketing_name: RVI20U32 - mode: Unpriv - base: 32 - release: RVI20 - introduction: | - This profile specifies the ISA features available to generic unprivileged - execution environments. - extensions: - I: - presence: mandatory - version: "~> 2.1" - note: | - RVI is the mandatory base ISA for RVA, and is little-endian. - - As per the unprivileged architecture specification, the `ecall` - instruction causes a requested trap to the execution environment. 
- - Misaligned loads and stores might not be supported. - - The `fence.tso` instruction is mandatory. - - NOTE: The `fence.tso` instruction was incorrectly described as - optional in the 2019 ratified specifications. However, `fence.tso` is - encoded within the standard `fence` encoding such that implementations - must treat it as a simple global fence if they do not natively support - TSO-ordering optimizations. As software can always assume without any - penalty that `fence.tso` is being exploited by a hardware - implementation, there is no advantage to making the instruction a - profile option. Later versions of the unprivileged ISA specifications - correctly indicate that `fence.tso` is mandatory. - A: - presence: optional - version: "= 2.1" - C: - presence: optional - version: "= 2.2" - D: - presence: optional - version: "= 2.2" - note: | - NOTE: The rationale to not include Q as a profile option is that - quad-precision floating-point is unlikely to be implemented in - hardware, and so we do not require or expect software to expend effort - optimizing use of Q instructions in case they are present. - F: - presence: optional - version: "= 2.2" - M: - presence: optional - version: "= 2.0" - Zicntr: - presence: optional - version: " = 2.0" - Zihpm: - presence: optional - version: "= 2.0" - note: | - The number of counters is platform-specific. - Zifencei: - presence: optional - version: "= 2.0" - recommendations: - - text: | - Implementations are strongly recommended to raise illegal-instruction - exceptions on attempts to execute unimplemented opcodes. - RVI20U64: - $inherits: "#/RVI20/profiles/RVI20U32" - base: 64 - marketing_name: RVI20U64 \ No newline at end of file + - { $ref: profile/RVI20U32.yaml# } + - { $ref: profile/RVI20U64.yaml# } diff --git a/backends/arch_gen/lib/arch_gen.rb b/backends/arch_gen/lib/arch_gen.rb index 7f424fd6f..91e9dbb49 100644 --- a/backends/arch_gen/lib/arch_gen.rb +++ b/backends/arch_gen/lib/arch_gen.rb @@ -286,24 +286,24 @@ def gen_arch_def profile_class_obj = YamlLoader.load(f, permitted_classes:[Date]) profile_class_name = profile_class_obj.keys[0] profile_class_obj[profile_class_name]["name"] = profile_class_name - profile_class_obj[profile_class_name]["__source"] = f + profile_class_obj[profile_class_name]["$source"] = f [profile_class_name, profile_class_obj[profile_class_name]] end.to_h profile_release_hash = Dir.glob($root / "arch" / "profile_release" / "**" / "*.yaml").map do |f| profile_release_obj = YamlLoader.load(f, permitted_classes:[Date]) profile_release_name = profile_release_obj.keys[0] profile_release_obj[profile_release_name]["name"] = profile_release_name - profile_release_obj[profile_release_name]["__source"] = f + profile_release_obj[profile_release_name]["$source"] = f [profile_release_name, profile_release_obj[profile_release_name]] end.to_h cert_class_ary = Dir.glob($root / "arch" / "certificate_class" / "**" / "*.yaml").map do |f| cert_class_obj = YamlLoader.load(f, permitted_classes:[Date]) - cert_class_obj["__source"] = f + cert_class_obj["$source"] = f cert_class_obj end cert_model_ary = Dir.glob($root / "arch" / "certificate_model" / "**" / "*.yaml").map do |f| cert_model_obj = YamlLoader.load(f, permitted_classes:[Date]) - cert_model_obj["__source"] = f + cert_model_obj["$source"] = f cert_model_obj end manual_hash = {} @@ -317,14 +317,14 @@ def gen_arch_def manual_info_file = manual_info_files.first manual_hash[manual_id] = YamlLoader.load(manual_info_file, permitted_classes:[Date]) - manual_hash[manual_id]["__source"] = 
manual_info_file + manual_hash[manual_id]["$source"] = manual_info_file # TODO: schema validation end manual_hash[manual_id]["versions"] ||= [] manual_hash[manual_id]["versions"] << YamlLoader.load(f, permitted_classes:[Date]) # TODO: schema validation - manual_hash[manual_id]["versions"].last["__source"] = f + manual_hash[manual_id]["versions"].last["$source"] = f end arch_def = { @@ -622,7 +622,7 @@ def maybe_add_csr(csr_name, extra_env = {}) # get the csr data (not including the name key), which is redundant at this point csr_data = YAML.load_file(merged_path) csr_data["fields"].each { |n, f| f["name"] = n } - csr_data["__source"] = og_path.to_s + csr_data["$source"] = og_path.to_s csr_yaml = YAML.dump(csr_data) begin @@ -864,7 +864,7 @@ def maybe_add_inst(inst_name, extra_env = {}) # get the inst data (not including the name key), which is redundant at this point inst_data = YAML.load_file(merged_path) - inst_data["__source"] = og_path.to_s + inst_data["$source"] = og_path.to_s inst_yaml = YAML.dump(inst_data) begin diff --git a/backends/arch_gen/tasks.rake b/backends/arch_gen/tasks.rake deleted file mode 100644 index 6515e54f5..000000000 --- a/backends/arch_gen/tasks.rake +++ /dev/null @@ -1,180 +0,0 @@ -# frozen_string_literal: true - -# This file contains tasks related to the generation of a configured architecture specification - -require_relative "../../lib/yaml_loader" -require_relative "lib/arch_gen" - -ARCH_GEN_DIR = Pathname.new(__FILE__).dirname - -def arch_def_for(config_name) - config_name = "_" if config_name.nil? - @arch_defs ||= {} - return @arch_defs[config_name] if @arch_defs.key?(config_name) - - @arch_defs[config_name] = - if config_name == "_" - ArchDef.new("_", $root / "gen" / "_" / "arch" / "arch_def.yaml") - else - ArchDef.new( - config_name, - $root / "gen" / config_name / "arch" / "arch_def.yaml", - overlay_path: $root / "cfgs" / config_name / "arch_overlay" - ) - end -end - -file "#{$root}/.stamps/arch-gen.stamp" => ( - [ - "#{$root}/.stamps", - "#{ARCH_GEN_DIR}/lib/arch_gen.rb", - "#{$root}/lib/idl/ast.rb", - "#{ARCH_GEN_DIR}/tasks.rake", - __FILE__ - ] + Dir.glob($root / "arch" / "**" / "*.yaml") -) do |t| - csr_ary = Dir.glob($root / "arch" / "csr" / "**" / "*.yaml").map do |f| - csr_obj = YamlLoader.load(f, permitted_classes:[Date]) - csr_obj["fields"].map do |k, v| - v["name"] = k - [k, v] - end - csr_obj["__source"] = f - csr_obj - end - inst_ary = Dir.glob($root / "arch" / "inst" / "**" / "*.yaml").map do |f| - inst_obj = YamlLoader.load(f, permitted_classes:[Date]) - inst_obj["__source"] = f - inst_obj - end - ext_ary = Dir.glob($root / "arch" / "ext" / "**" / "*.yaml").map do |f| - ext_obj = YamlLoader.load(f, permitted_classes:[Date]) - ext_obj["__source"] = f - ext_obj - end - profile_class_hash = Dir.glob($root / "arch" / "profile_class" / "**" / "*.yaml").map do |f| - profile_class_obj = YamlLoader.load(f, permitted_classes:[Date]) - profile_class_name = profile_class_obj.keys[0] - profile_class_obj[profile_class_name]["name"] = profile_class_name - profile_class_obj[profile_class_name]["__source"] = f - [profile_class_name, profile_class_obj[profile_class_name]] - end.to_h - profile_release_hash = Dir.glob($root / "arch" / "profile_release" / "**" / "*.yaml").map do |f| - profile_release_obj = YamlLoader.load(f, permitted_classes:[Date]) - profile_release_name = profile_release_obj.keys[0] - profile_release_obj[profile_release_name]["name"] = profile_release_name - profile_release_obj[profile_release_name]["__source"] = f - [profile_release_name, 
profile_release_obj[profile_release_name]] - end.to_h - cert_class_ary = Dir.glob($root / "arch" / "certificate_class" / "**" / "*.yaml").map do |f| - cert_class_obj = YamlLoader.load(f, permitted_classes:[Date]) - cert_class_obj["__source"] = f - cert_class_obj - end - cert_model_ary = Dir.glob($root / "arch" / "certificate_model" / "**" / "*.yaml").map do |f| - cert_model_obj = YamlLoader.load(f, permitted_classes:[Date]) - cert_model_obj["__source"] = f - cert_model_obj - end - manual_hash = {} - Dir.glob($root / "arch" / "manual" / "**" / "contents.yaml").map do |f| - manual_version = YamlLoader.load(f, permitted_classes:[Date]) - manual_id = manual_version["manual"] - unless manual_hash.key?(manual_id) - manual_info_files = Dir.glob($root / "arch" / "manual" / "**" / "#{manual_id}.yaml") - raise "Could not find manual info '#{manual_id}'.yaml, needed by #{f}" if manual_info_files.empty? - raise "Found multiple manual infos '#{manual_id}'.yaml, needed by #{f}" if manual_info_files.size > 1 - - manual_info_file = manual_info_files.first - manual_hash[manual_id] = YamlLoader.load(manual_info_file, permitted_classes:[Date]) - manual_hash[manual_id]["__source"] = manual_info_file - # TODO: schema validation - end - - manual_hash[manual_id]["versions"] ||= [] - manual_hash[manual_id]["versions"] << YamlLoader.load(f, permitted_classes:[Date]) - # TODO: schema validation - manual_hash[manual_id]["versions"].last["__source"] = f - end - - arch_def = { - "type" => "unconfigured", - "instructions" => inst_ary, - "extensions" => ext_ary, - "csrs" => csr_ary, - "profile_classes" => profile_class_hash, - "profile_releases" => profile_release_hash, - "certificate_classes" => cert_class_ary, - "certificate_models" => cert_model_ary, - "manuals" => manual_hash - } - - dest = "#{$root}/gen/_/arch/arch_def.yaml" - FileUtils.mkdir_p File.dirname(dest) - File.write(dest, YAML.dump(arch_def)) - - FileUtils.touch(t.name) -end - -obj_model_files = Dir.glob($root / "lib" / "arch_obj_models" / "*.rb") -obj_model_files << ($root / "lib" / "arch_def.rb") - -arch_files = Dir.glob($root / "arch" / "**" / "*.yaml") - -# stamp to indicate completion of Arch Gen for a given config -rule %r{#{$root}/\.stamps/arch-gen-.*\.stamp} => proc { |tname| - config_name = Pathname.new(tname).basename(".stamp").sub("arch-gen-", "") - config_files = - Dir.glob($root / "cfgs" / config_name / "arch_overlay" / "**" / "*.yaml") + - [($root / "cfgs" / config_name / "params.yaml").to_s] - [ - "#{$root}/.stamps", - "#{ARCH_GEN_DIR}/lib/arch_gen.rb", - "#{$root}/lib/idl/ast.rb", - "#{ARCH_GEN_DIR}/tasks.rake", - arch_files, - config_files, - - # the stamp file is not actually dependent on the Ruby object model, - # but in general we want to rebuild anything using this stamp when the object model changes - obj_model_files.map(&:to_s) - ].flatten -} do |t| - config_name = Pathname.new(t.name).basename(".stamp").sub("arch-gen-", "") - - arch_gen = ArchGen.new(config_name) - puts "Generating architecture definition in #{arch_gen.gen_dir.relative_path_from($root)}" - - arch_gen.generate - - puts " Found #{arch_gen.implemented_csrs.size} CSRs" - puts " Found #{arch_gen.implemented_extensions.size} Extensions" - puts " Found #{arch_gen.implemented_instructions.size} Instructions" - - FileUtils.touch t.name -end - -namespace :gen do - desc "Generate the cfg-specific architecture files for config_name" - task :cfg_arch, [:config_name] do |_t, args| - raise "No config '#{args[:config_name]}' found in cfgs/" unless ($root / "cfgs" / 
args[:config_name]).directory? - - Rake::Task["#{$root}/.stamps/arch-gen-#{args[:config_name]}.stamp"].invoke(args[:config_name]) - end - - desc "Generate a unified architecture file (configuration independent)" - task :arch do - Rake::Task["#{$root}/.stamps/arch-gen.stamp"].invoke - end -end - -namespace :validate do - desc "Validate that a configuration folder valid for the list of extensions it claims to implement" - task :cfg, [:config_name] do |_t, args| - raise "No config '#{args[:config_name]}' found in cfgs/" unless ($root / "cfgs" / args[:config_name]).directory? - - ArchGen.new(args[:config_name]).validate_params - - puts "Success! The '#{args[:config_name]}' configuration passes validation checks" - end -end diff --git a/backends/certificate_doc/tasks.rake b/backends/certificate_doc/tasks.rake index 106c38c11..64945d7d9 100644 --- a/backends/certificate_doc/tasks.rake +++ b/backends/certificate_doc/tasks.rake @@ -17,16 +17,15 @@ Dir.glob("#{$root}/arch/certificate_model/*.yaml") do |f| base = cert_model_obj["base"] raise "Missing certificate model base" if base.nil? - + file "#{$root}/gen/certificate_doc/adoc/#{cert_model_name}.adoc" => [ "#{$root}/arch/certificate_model/#{cert_model_name}.yaml", "#{$root}/arch/certificate_class/#{cert_class_name}.yaml", "#{CERT_DOC_DIR}/templates/certificate.adoc.erb", - __FILE__, - "#{$root}/.stamps/arch-gen-_#{base}.stamp" + __FILE__ ] do |t| # TODO: schema validation - arch_def = arch_def_for("_#{base}") + arch_def = arch_def_for("rv#{base}") cert_model = arch_def.cert_model(cert_model_name) raise "No certificate model defined for #{cert_model_name}" if cert_model.nil? diff --git a/backends/certificate_doc/templates/certificate.adoc.erb b/backends/certificate_doc/templates/certificate.adoc.erb index b71e4d1d4..beaa7b94e 100644 --- a/backends/certificate_doc/templates/certificate.adoc.erb +++ b/backends/certificate_doc/templates/certificate.adoc.erb @@ -120,7 +120,7 @@ None <% ext = arch_def.extension(ext_req.name) -%> | <%= ext_req.req_id %> | <-def,<%= ext_req.name %>>> -| <%= ext_req.version_requirement %> +| <%= ext_req.requirement_specs.map(&:to_s).join(", ") %> | <%= ext.nil? ? "" : ext.long_name %> | <%= ext_req.note.nil? ? "" : ext_req.note %> <% end # each ext_req -%> @@ -219,7 +219,7 @@ None == CSR Summary <% - csrs = cert_model.in_scope_ext_reqs.map { |ext_req| ext_req.csrs(arch_def) }.flatten.uniq + csrs = cert_model.in_scope_ext_reqs.map { |ext_req| ext_req.csrs }.flatten.uniq -%> === By Name @@ -295,10 +295,10 @@ Requirement <%= req.name %> only apply when <%= req.when_pretty %>. === Extension <%= ext_req.name %> + <%= ext.nil? ? "" : "*Long Name*: " + ext.long_name + " +" %> -*Version Requirement*: <%= ext_req.version_requirement %> + +*Version Requirement*: <%= ext_req.requirement_specs.map(&:to_s).join(", ") %> + <% ext.versions.each do |v| -%> -<%= v.version %>:: +<%= v.version_spec %>:: State::: <%= v.state %> <% if v.state == "ratified" -%> @@ -320,7 +320,7 @@ Requirement <%= req.name %> only apply when <%= req.when_pretty %>. 
<% if v.implications.size > 0 -%> Implies::: <% v.implications.each do |i| -%> - * `<%= i.name %>` version <%= i.version %> + * `<%= i.name %>` version <%= i.version_spec %> <% end -%> <% end -%> <% end -%> @@ -401,7 +401,7 @@ The following instructions are added by this extension: This instruction is defined by: -<%= inst.defined_by.to_asciidoc %> +<%= inst.defined_by_condition.to_asciidoc %> ==== Encoding @@ -521,7 +521,7 @@ This instruction may result in the following synchronous exceptions: == CSR Details <% - csrs = cert_model.in_scope_ext_reqs.map { |ext_req| ext_req.csrs(arch_def) }.flatten.uniq + csrs = cert_model.in_scope_ext_reqs.map { |ext_req| ext_req.csrs }.flatten.uniq csrs.sort_by!(&:name) -%> @@ -548,7 +548,7 @@ h| CSR Address | <%= "0x#{csr.address.to_s(16)}" %> <% if csr.priv_mode == 'VS' -%> h| Virtual CSR Address | <%= "0x#{csr.virtual_address.to_s(16)}" %> <% end -%> -h| Defining extension a| <%= csr.defined_by.to_asciidoc %> +h| Defining extension a| <%= csr.defined_by_condition.to_asciidoc %> <% if csr.dynamic_length?(arch_def) -%> h| Length | <%= csr.length_pretty(arch_def) %> <% else -%> diff --git a/backends/cfg_html_doc/adoc_gen.rake b/backends/cfg_html_doc/adoc_gen.rake index 89ccf01ea..1655164a8 100644 --- a/backends/cfg_html_doc/adoc_gen.rake +++ b/backends/cfg_html_doc/adoc_gen.rake @@ -5,9 +5,7 @@ require "ruby-prof" # fill out templates for every csr, inst, ext, and func ["csr", "inst", "ext", "func"].each do |type| rule %r{#{$root}/\.stamps/adoc-gen-#{type}s-.*\.stamp} => proc { |tname| - config_name = Pathname.new(tname).basename(".stamp").sub("adoc-gen-#{type}s-", "") [ - "#{$root}/.stamps/arch-gen-#{config_name}.stamp", "#{CFG_HTML_DOC_DIR}/templates/#{type}.adoc.erb", "#{$root}/lib/arch_def.rb", "#{$root}/lib/idl/passes/gen_adoc.rb", @@ -28,13 +26,13 @@ require "ruby-prof" case type when "csr" - arch_def.implemented_csrs.each do |csr| + arch_def.transitive_implemented_csrs.each do |csr| path = dir_path / "#{csr.name}.adoc" puts " Generating #{path}" File.write(path, arch_def.find_replace_links(erb.result(binding))) end when "inst" - arch_def.implemented_instructions.each do |inst| + arch_def.transitive_implemented_instructions.each do |inst| path = dir_path / "#{inst.name}.adoc" puts " Generating #{path}" # RubyProf.start @@ -43,7 +41,7 @@ require "ruby-prof" # RubyProf::FlatPrinter.new(result).print(STDOUT) end when "ext" - arch_def.implemented_extensions.each do |ext_version| + arch_def.transitive_implemented_extensions.each do |ext_version| ext = arch_def.extension(ext_version.name) path = dir_path / "#{ext.name}.adoc" puts " Generating #{path}" @@ -86,17 +84,17 @@ require "ruby-prof" case type when "csr" puts "Generting full CSR list" - arch_def.implemented_csrs.each do |csr| + arch_def.transitive_implemented_csrs.each do |csr| lines << " * `#{csr.name}` #{csr.long_name}" end when "ext" puts "Generting full extension list" - arch_def.implemented_extensions.each do |ext_version| + arch_def.transitive_implemented_extensions.each do |ext_version| lines << " * `#{ext_version.name}` #{ext_version.ext.long_name}" end when "inst" puts "Generting full instruction list" - arch_def.implemented_instructions.each do |inst| + arch_def.transitive_implemented_instructions.each do |inst| lines << " * `#{inst.name}` #{inst.long_name}" end when "func" @@ -112,14 +110,10 @@ require "ruby-prof" end end -rule %r{#{$root}/gen/cfg_html_doc/.*/adoc/ROOT/landing.adoc} => proc { |tname| - config_name = 
Pathname.new(tname).relative_path_from("#{$root}/gen/cfg_html_doc").to_s.split("/")[0] - [ - "#{$root}/\.stamps/arch-gen-#{config_name}\.stamp", - "#{CFG_HTML_DOC_DIR}/templates/landing.adoc.erb", - __FILE__ - ] -} do |t| +rule %r{#{$root}/gen/cfg_html_doc/.*/adoc/ROOT/landing.adoc} => [ + "#{CFG_HTML_DOC_DIR}/templates/landing.adoc.erb", + __FILE__ +] do |t| config_name = Pathname.new(t.name).relative_path_from("#{$root}/gen/cfg_html_doc").to_s.split("/")[0] arch_def = arch_def_for(config_name) @@ -131,7 +125,6 @@ rule %r{#{$root}/gen/cfg_html_doc/.*/adoc/ROOT/landing.adoc} => proc { |tname| File.write t.name, erb.result(binding) end - namespace :gen do desc "Generate Asciidoc source for config into gen/CONFIG_NAME/adoc" task :adoc, [:config_name] do |_t, args| diff --git a/backends/cfg_html_doc/html_gen.rake b/backends/cfg_html_doc/html_gen.rake index 24b1f3b69..0629170ee 100644 --- a/backends/cfg_html_doc/html_gen.rake +++ b/backends/cfg_html_doc/html_gen.rake @@ -58,7 +58,6 @@ rule %r{#{$root}/gen/cfg_html_doc/.*/antora/modules/nav.adoc} => proc { |tname| Dir.glob("#{$root}/gen/cfg_html_doc/#{config_name}/antora/modules/exts/**/*.adoc") + [ "#{CFG_HTML_DOC_DIR}/templates/toc.adoc.erb", - "#{$root}/.stamps/arch-gen-#{config_name}.stamp", __FILE__ ] } do |t| @@ -76,7 +75,6 @@ rule %r{#{$root}/gen/cfg_html_doc/.*/antora/modules/ROOT/pages/config.adoc} => p config_name = Pathname.new(tname).relative_path_from("#{$root}/gen/cfg_html_doc").to_s.split("/")[0] [ "#{CFG_HTML_DOC_DIR}/templates/config.adoc.erb", - "#{$root}/.stamps/arch-gen-#{config_name}.stamp", __FILE__ ] } do |t| diff --git a/backends/cfg_html_doc/templates/config.adoc.erb b/backends/cfg_html_doc/templates/config.adoc.erb index f6f75655a..96da577f4 100644 --- a/backends/cfg_html_doc/templates/config.adoc.erb +++ b/backends/cfg_html_doc/templates/config.adoc.erb @@ -4,8 +4,8 @@ |=== | Name | Version -<%- arch_def.implemented_extensions.sort{ |a,b| a.name <=> b.name }.each do |e| -%> -| `<%= e.name %>` | <%= e.version.to_s %> +<%- arch_def.transitive_implemented_extensions.sort{ |a,b| a.name <=> b.name }.each do |e| -%> +| `<%= e.name %>` | <%= e.version_spec %> <%- end -%> |=== diff --git a/backends/cfg_html_doc/templates/csr.adoc.erb b/backends/cfg_html_doc/templates/csr.adoc.erb index 671ddb68a..7b698841e 100644 --- a/backends/cfg_html_doc/templates/csr.adoc.erb +++ b/backends/cfg_html_doc/templates/csr.adoc.erb @@ -14,7 +14,7 @@ h| CSR Address | <%= "0x#{csr.address.to_s(16)}" %> <%- if csr.priv_mode == 'VS' -%> h| Virtual CSR Address | <%= "0x#{csr.virtual_address.to_s(16)}" %> <%- end -%> -h| Defining extension a| <%= csr.defined_by.to_asciidoc %> +h| Defining extension a| <%= csr.defined_by_condition.to_asciidoc %> <%- if csr.dynamic_length?(arch_def) -%> h| Length | <%= csr.length_pretty(arch_def) %> <%- else -%> diff --git a/backends/cfg_html_doc/templates/ext.adoc.erb b/backends/cfg_html_doc/templates/ext.adoc.erb index 1808e4941..de508ea4d 100644 --- a/backends/cfg_html_doc/templates/ext.adoc.erb +++ b/backends/cfg_html_doc/templates/ext.adoc.erb @@ -2,13 +2,13 @@ = <%= ext.name %> Extension <%= ext.long_name %> -Implemented Version:: <%= ext_version.version %> +Implemented Version:: <%= ext_version.version_str %> == Versions <%- ext.versions.each do |v| -%> -<%- implemented = arch_def.implemented_extensions.include?(v) -%> -<%= v.version %>:: +<%- implemented = arch_def.transitive_implemented_extensions.include?(v) -%> +<%= v.version_str %>:: Ratification date::: <%= v.ratification_date %> <%- unless 
v.changes.empty? -%> @@ -26,7 +26,7 @@ Implemented Version:: <%= ext_version.version %> <%- unless v.implications.empty? -%> Implies::: <%- v.implications.each do |i| -%> - * `<%= i.name %>` version <%= i.version %> + * `<%= i.name %>` version <%= i.version_str %> <%- end -%> <%- end -%> <%- end -%> @@ -35,7 +35,7 @@ Implemented Version:: <%= ext_version.version %> <%= ext.description %> -<%- insts = arch_def.implemented_instructions.select { |i| i.definedBy == ext.name || i.definedBy.include?(ext.name) } -%> +<%- insts = arch_def.transitive_implemented_instructions.select { |i| i.definedBy == ext.name || i.definedBy.include?(ext.name) } -%> <%- unless insts.empty? -%> == Instructions diff --git a/backends/cfg_html_doc/templates/inst.adoc.erb b/backends/cfg_html_doc/templates/inst.adoc.erb index 7159fdced..6b7386fde 100644 --- a/backends/cfg_html_doc/templates/inst.adoc.erb +++ b/backends/cfg_html_doc/templates/inst.adoc.erb @@ -7,7 +7,7 @@ This instruction is defined by: -<%- inst.defined_by.to_asciidoc -%> +<%- inst.defined_by_condition.to_asciidoc -%> == Encoding diff --git a/backends/cfg_html_doc/templates/toc.adoc.erb b/backends/cfg_html_doc/templates/toc.adoc.erb index 0282723d5..eafe479b1 100644 --- a/backends/cfg_html_doc/templates/toc.adoc.erb +++ b/backends/cfg_html_doc/templates/toc.adoc.erb @@ -1,17 +1,17 @@ * xref:ROOT:config.adoc[Configuration] .Extensions -<%- arch_def.implemented_extensions.sort { |a, b| a.name <=> b.name }.each do |ext| -%> +<%- arch_def.transitive_implemented_extensions.sort { |a, b| a.name <=> b.name }.each do |ext| -%> * %%LINK%ext;<%= ext.name %>;<%= ext.name %>%% <%- end -%> .Control and Status Registers -<%- arch_def.implemented_csrs.sort { |a, b| a.name <=> b.name }.each do |csr| -%> +<%- arch_def.transitive_implemented_csrs.sort { |a, b| a.name <=> b.name }.each do |csr| -%> * %%LINK%csr;<%= csr.name %>;<%= csr.name %>%% <%- end -%> .Instructions -<%- arch_def.implemented_instructions.sort { |a, b| a.name <=> b.name }.each do |inst| -%> +<%- arch_def.transitive_implemented_instructions.sort { |a, b| a.name <=> b.name }.each do |inst| -%> * %%LINK%inst;<%= inst.name %>;<%= inst.name %>%% <%- end -%> diff --git a/backends/common_templates/adoc/csr.adoc.erb b/backends/common_templates/adoc/csr.adoc.erb index 751b78694..3909b977d 100644 --- a/backends/common_templates/adoc/csr.adoc.erb +++ b/backends/common_templates/adoc/csr.adoc.erb @@ -8,7 +8,7 @@ == Attributes [%autowidth] |=== -h| Defining Extension a| <%= csr.defined_by.to_asciidoc %> +h| Defining Extension a| <%= csr.defined_by_condition.to_asciidoc %> h| CSR Address | <%= "0x#{csr.address.to_s(16)}" %> <%- if csr.priv_mode == 'VS' -%> h| Virtual CSR Address | <%= "0x#{csr.virtual_address.to_s(16)}" %> diff --git a/backends/common_templates/adoc/inst.adoc.erb b/backends/common_templates/adoc/inst.adoc.erb index 8d9422e47..830567b9d 100644 --- a/backends/common_templates/adoc/inst.adoc.erb +++ b/backends/common_templates/adoc/inst.adoc.erb @@ -81,4 +81,4 @@ Operation:: Included in:: -<%= inst.defined_by.to_asciidoc %> +<%= inst.defined_by_condition.to_asciidoc %> diff --git a/backends/ext_pdf_doc/tasks.rake b/backends/ext_pdf_doc/tasks.rake index bbcb92c3a..7fd756364 100644 --- a/backends/ext_pdf_doc/tasks.rake +++ b/backends/ext_pdf_doc/tasks.rake @@ -120,10 +120,7 @@ rule %r{#{$root}/gen/ext_pdf_doc/.*/adoc/.*_extension\.adoc} => proc { |tname| end raise "Can't find extension '#{ext_name}'" if arch_yaml_paths.empty? - stamp = config_name == "_" ? 
"#{$root}/.stamps/arch-gen-_64.stamp" : "#{$root}/.stamps/arch-gen-#{config_name}.stamp" - [ - stamp, (EXT_PDF_DOC_DIR / "templates" / "ext_pdf.adoc.erb").to_s, arch_yaml_paths, __FILE__ @@ -131,12 +128,7 @@ rule %r{#{$root}/gen/ext_pdf_doc/.*/adoc/.*_extension\.adoc} => proc { |tname| } do |t| config_name = Pathname.new(t.name).relative_path_from("#{$root}/gen/ext_pdf_doc").to_s.split("/")[0] - arch_def = - if config_name == "_" - arch_def_for("_64") - else - arch_def_for(config_name) - end + arch_def = arch_def_for("_") ext_name = Pathname.new(t.name).basename(".adoc").to_s.split("_")[0..-2].join("_") @@ -151,7 +143,7 @@ rule %r{#{$root}/gen/ext_pdf_doc/.*/adoc/.*_extension\.adoc} => proc { |tname| ext.versions else vs = ext.versions.select do |ext_ver| - version_strs.include?(ext_ver.version.to_s) + version_strs.include?(ext_ver.version_spec.to_s) end vs << ext.max_version if version_strs.include?("latest") vs.uniq diff --git a/backends/ext_pdf_doc/templates/ext_pdf.adoc.erb b/backends/ext_pdf_doc/templates/ext_pdf.adoc.erb index 44501d85b..a848216cc 100644 --- a/backends/ext_pdf_doc/templates/ext_pdf.adoc.erb +++ b/backends/ext_pdf_doc/templates/ext_pdf.adoc.erb @@ -1,7 +1,7 @@ [[header]] :description: <%= ext.long_name %> (<%= ext.name %>) :revdate: <%= max_version.ratification_date.nil? ? Date.today : max_version.ratification_date %> -:revnumber: <%= max_version.version %> +:revnumber: <%= max_version.version_spec %> :revmark: <%= case max_version.state when "ratified" @@ -129,7 +129,7 @@ Copyright <%= max_version.ratification_date.nil? ? Date.today.year : max_version == Acknowledgements <%- versions.each do |version| -%> -Contributors to version <%= version.version %> of the specification (in alphabetical order) include: + +Contributors to version <%= version.version_spec %> of the specification (in alphabetical order) include: + <%- unless version.contributors.empty? -%> <%- version.contributors.sort { |a, b| a.name.split(" ").last <=> b.name.split(" ").last }.each do |c| -%> @@ -146,16 +146,16 @@ improved this specification through their comments and questions. == Versions <%- if versions.size > 1 -%> -This specification documents versions <%= versions.map { |v| v.version }.join(', ') %> of <%= ext.name %>: +This specification documents versions <%= versions.map { |v| v.version_spec.to_s }.join(', ') %> of <%= ext.name %>: <%- else -%> -This specification documents version <%= max_version.version %> of <%= ext.name %>. +This specification documents version <%= max_version.version_spec %> of <%= ext.name %>. <%- end -%> === Version History <%- ext.versions.each do |version| -%> -- -Version:: <%= version.version %> +Version:: <%= version.version_spec %> State:: <%= version.state %> <%- unless version.ratification_date.nil? -%> Ratification Date:: <%= version.ratification_date %> @@ -173,10 +173,10 @@ Changes:: <%- end -%> <%- unless version.implications.empty? -%> Implies:: -* <%= version.implications.map { |i| "#{i.name} (#{i.version})" }.join("\n* ") %> -<%- unless version.requirements.empty? -%> +* <%= version.implications.map { |i| "#{i.name} (#{i.version_spec})" }.join("\n* ") %> +<%- unless version.requirement_condition.empty? 
-%> Requires:: -<%= version.requirements.to_asciidoc %> +<%= version.requirement_condition.to_asciidoc %> <%- end -%> <%- end -%> -- @@ -200,7 +200,7 @@ Requires:: <%- end -%> <%- implications.each do |sub_ext| -%> -==== <%= sub_ext.name %> (<%= sub_ext.version %>) +==== <%= sub_ext.name %> (<%= sub_ext.version_spec %>) <%- if versions.size > 1 -%> <%= sub_ext.name %> (<%= sub_ext.version %>) is implied by @@ -210,10 +210,10 @@ of <%= ext.name %>. <%= arch_def.extension(sub_ext.name).description %> -<%- unless sub_ext.requirements.empty? -%> +<%- unless sub_ext.requirement_condition.empty? -%> <%= sub_ext.name %> requires: -<%= sub_ext.requirements.to_asciidoc %> +<%= sub_ext.requirement_condition.to_asciidoc %> <%- end -%> diff --git a/backends/manual/tasks.rake b/backends/manual/tasks.rake index 80392e38b..1a1a75105 100644 --- a/backends/manual/tasks.rake +++ b/backends/manual/tasks.rake @@ -1,26 +1,33 @@ # frozen_string_literal: true +require "digest" + require_relative "#{$lib}/arch_def" $root = Pathname.new(__FILE__).dirname.dirname.realpath if $root.nil? MANUAL_GEN_DIR = $root / "gen" / "manual" -def versions_from_env(manual) +def versions_from_env(manual_name) versions = ENV["VERSIONS"].split(",") output_hash = nil if versions.include?("all") raise ArgumentError, "'all' was given as a version, so nothing else should be" unless versions.length == 1 - versions = manual.versions + versions = [] + version_fns = Dir.glob("#{$root}/arch/manual_version/**/*.yaml") + raise "Cannot find version files" if version_fns.empty? + + version_fns.each do |manual_version_fn| + manual_version_obj = YAML.load_file(manual_version_fn, permitted_classes: [Date]) + versions << manual_version_obj["name"] if manual_version_obj["manual"]["$ref"] == "manual/#{manual_name}.yaml#" + end output_hash = "all" else - versions = versions.map { |vname| manual.versions.find { |v| v.name = vname } } - if versions.any?(&:nil?) - idx = versions.index(&:nil?) - raise "No manual version '#{ENV['VERSIONS'].split(',')[idx]}' for '#{args[:manual_name]}'" + versions.each do |version| + raise "No manual version #{version}" if Dir.glob("#{$root}/arch/manual_version/**/#{version}.yaml").empty? end - output_hash = versions.size == 1 ? versions[0] : versions.hash + output_hash = versions.size == 1 ? versions[0] : Digest::SHA2.hexdigest(versions.join("")) end [versions, output_hash] @@ -43,7 +50,10 @@ end # Rule to create a chapter page in antora hierarchy rule %r{#{MANUAL_GEN_DIR}/.*/.*/antora/modules/chapters/pages/.*\.adoc} do |t| parts = t.name.sub("#{MANUAL_GEN_DIR}/", "").split("/") - manual_version = arch_def_for("_64").manual(parts[0]).version(parts[1]) + manual_name = parts[0] + version_name = parts[1] + manual = arch_def_for("_").manual(parts[0]) + manual_version = manual.version(parts[1]) chapter_name = File.basename(t.name, ".adoc") volume = manual_version.volumes.find { |v| !v.chapter(chapter_name).nil? 
} @@ -52,7 +62,7 @@ rule %r{#{MANUAL_GEN_DIR}/.*/.*/antora/modules/chapters/pages/.*\.adoc} do |t| chapter = volume.chapter(chapter_name) FileUtils.mkdir_p File.dirname(t.name) - FileUtils.ln_s chapter.path, t.name + FileUtils.ln_s chapter.fullpath, t.name end # Rule to create antora.yml for a manual version @@ -61,20 +71,22 @@ rule %r{#{MANUAL_GEN_DIR}/.*/.*/antora/antora.yml} => proc { |tname| manual_name = parts[0] version_name = parts[1] - manual_yaml_path = $root / "arch" / "manual" / manual_name / "#{manual_name}.yaml" - contents_path = $root / "arch" / "manual" / manual_name / version_name / "contents.yaml" + manual_yaml_path = $root / "arch" / "manual" / "#{manual_name}.yaml" + version_paths = Dir.glob($root / "arch" / "manual_version" / "**" / "#{version_name}.yaml") + raise "Cannot find version" unless version_paths.size == 1 + + version_yaml_path = version_paths[0] raise "Cannot find #{manual_yaml_path}" unless manual_yaml_path.exist? - raise "Cannot find #{contents_path}" unless contents_path.exist? [ __FILE__, manual_yaml_path.to_s, - contents_path.to_s + version_yaml_path.to_s ] } do |t| parts = t.name.sub("#{MANUAL_GEN_DIR}/", "").split("/") - manual_version = arch_def_for("_64").manual(parts[0])&.version(parts[1]) + manual_version = arch_def_for("_").manual(parts[0])&.version(parts[1]) raise "Can't find any manual version for '#{parts[0]}' '#{parts[1]}'" if manual_version.nil? @@ -94,22 +106,24 @@ rule %r{#{MANUAL_GEN_DIR}/.*/.*/antora/nav.adoc} => proc { |tname| manual_name = parts[0] version_name = parts[1] - manual_yaml_path = $root / "arch" / "manual" / manual_name / "#{manual_name}.yaml" - contents_path = $root / "arch" / "manual" / manual_name / version_name / "contents.yaml" + manual_yaml_path = $root / "arch" / "manual" / "#{manual_name}.yaml" + version_paths = Dir.glob($root / "arch" / "manual_version" / "**" / "#{version_name}.yaml") + raise "Cannot find version" unless version_paths.size == 1 + + version_yaml_path = version_paths[0] nav_template_path = $root / "backends" / "manual" / "templates" / "#{manual_name}_nav.adoc.erb" raise "Cannot find #{manual_yaml_path}" unless manual_yaml_path.exist? - raise "Cannot find #{contents_path}" unless contents_path.exist? FileList[ __FILE__, manual_yaml_path.to_s, - contents_path.to_s, + version_yaml_path.to_s, nav_template_path.to_s ] } do |t| parts = t.name.sub("#{MANUAL_GEN_DIR}/", "").split("/") - manual_version = arch_def_for("_64").manual(parts[0])&.version(parts[1]) + manual_version = arch_def_for("_").manual(parts[0])&.version(parts[1]) raise "Can't find any manual version for '#{parts[0]}' '#{parts[1]}'" if manual_version.nil? @@ -118,9 +132,10 @@ rule %r{#{MANUAL_GEN_DIR}/.*/.*/antora/nav.adoc} => proc { |tname| raise "There is no navigation file for manual '#{parts[0]}' at '#{nav_template_path}'" end + raise "no arch_def" if manual_version.arch_def.nil? 
+ erb = ERB.new(nav_template_path.read, trim_mode: "-") erb.filename = nav_template_path.to_s - puts erb.encoding FileUtils.mkdir_p File.dirname(t.name) File.write t.name, erb.result(binding) @@ -132,23 +147,26 @@ rule %r{#{MANUAL_GEN_DIR}/.*/.*/antora/modules/ROOT/pages/index.adoc} => proc { manual_name = parts[0] version_name = parts[1] - manual_yaml_path = $root / "arch" / "manual" / manual_name / "#{manual_name}.yaml" - contents_path = $root / "arch" / "manual" / manual_name / version_name / "contents.yaml" + manual_yaml_path = $root / "arch" / "manual" / "#{manual_name}.yaml" + version_paths = Dir.glob($root / "arch" / "manual_version" / "**" / "#{version_name}.yaml") + raise "Cannot find version" unless version_paths.size == 1 + + version_yaml_path = version_paths[0] + version_index_template_path = $root / "backends" / "manual" / "templates" / "#{manual_name}_version_index.adoc.erb" raise "Cannot find #{manual_yaml_path}" unless manual_yaml_path.exist? - raise "Cannot find #{contents_path}" unless contents_path.exist? raise "Cannot find #{version_index_template_path}" unless version_index_template_path.exist? FileList[ __FILE__, manual_yaml_path.to_s, - contents_path.to_s, + version_yaml_path.to_s, version_index_template_path.to_s ] } do |t| parts = t.name.sub("#{MANUAL_GEN_DIR}/", "").split("/") - manual_version = arch_def_for("_64").manual(parts[0])&.version(parts[1]) + manual_version = arch_def_for("_").manual(parts[0])&.version(parts[1]) raise "Can't find any manual version for '#{parts[0]}' '#{parts[1]}'" if manual_version.nil? @@ -168,12 +186,11 @@ end # rule to create instruction appendix page rule %r{#{MANUAL_GEN_DIR}/.*/.*/antora/modules/insts/pages/.*.adoc} => [ __FILE__, - "#{$root}/.stamps/arch-gen-_64.stamp", ($root / "backends" / "manual" / "templates" / "instruction.adoc.erb").to_s ] do |t| inst_name = File.basename(t.name, ".adoc") - arch_def = arch_def_for("_64") + arch_def = arch_def_for("_") inst = arch_def.instruction(inst_name) raise "Can't find instruction '#{inst_name}'" if inst.nil? @@ -213,12 +230,11 @@ end # rule to create ext appendix page rule %r{#{MANUAL_GEN_DIR}/.*/.*/antora/modules/exts/pages/.*.adoc} => [ __FILE__, - "#{$root}/.stamps/arch-gen-_64.stamp", ($root / "backends" / "manual" / "templates" / "ext.adoc.erb").to_s ] do |t| ext_name = File.basename(t.name, ".adoc") - arch_def = arch_def_for("_64") + arch_def = arch_def_for("_") ext = arch_def.extension(ext_name) raise "Can't find extension '#{ext_name}'" if ext.nil? 
@@ -233,10 +249,9 @@ end # rule to create IDL function appendix page rule %r{#{MANUAL_GEN_DIR}/.*/.*/antora/modules/funcs/pages/funcs.adoc} => [ __FILE__, - "#{$root}/.stamps/arch-gen-_64.stamp", ($root / "backends" / "manual" / "templates" / "func.adoc.erb").to_s ] do |t| - arch_def = arch_def_for("_64") + arch_def = arch_def_for("_") funcs_template_path = $root / "backends" / "manual" / "templates" / "func.adoc.erb" erb = ERB.new(funcs_template_path.read, trim_mode: "-") @@ -249,10 +264,9 @@ end # rule to create IDL function appendix page rule %r{#{MANUAL_GEN_DIR}/.*/.*/antora/modules/params/pages/param_list.adoc} => [ __FILE__, - "#{$root}/.stamps/arch-gen-_64.stamp", ($root / "backends" / "manual" / "templates" / "param_list.adoc.erb").to_s ] do |t| - arch_def = arch_def_for("_64") + arch_def = arch_def_for("_") parts = t.name.sub("#{MANUAL_GEN_DIR}/", "").split("/") manual_version = arch_def.manual(parts[0])&.version(parts[1]) @@ -270,7 +284,7 @@ rule %r{#{MANUAL_GEN_DIR}/.*/top/.*/antora/landing/antora.yml} => [ parts = t.name.sub("#{MANUAL_GEN_DIR}/", "").split("/") manual_name = parts[0] - arch_def = arch_def_for("_64") + arch_def = arch_def_for("_") manual = arch_def.manual(manual_name) raise "Can't find any manual version for '#{manual_name}'" if manual.nil? @@ -285,27 +299,26 @@ end rule %r{#{MANUAL_GEN_DIR}/.*/top/.*/antora/landing/modules/ROOT/pages/index.adoc} => proc { |tname| parts = tname.sub("#{MANUAL_GEN_DIR}/", "").split("/") manual_name = parts[0] + versions, _ = versions_from_env(ENV["MANUAL_NAME"]) + version_files = Dir.glob($root / "arch" / "manual_version" / "**" / "*.yaml").select { |f| versions.include?(File.basename(f, ".yaml"))} FileList[ __FILE__, - ($root / "arch" / "manual" / manual_name / "#{manual_name}.yaml").to_s, + ($root / "arch" / "manual" / "#{manual_name}.yaml").to_s, ($root / "backends" / "manual" / "templates" / "index.adoc.erb").to_s, - ($root / "arch" / "manual" / manual_name / "**" / "contents.yaml").to_s - ] + ] + version_files } do |t| parts = t.name.sub("#{MANUAL_GEN_DIR}/", "").split("/") manual_name = parts[0] - arch_def = arch_def_for("_64") - manual = arch_def.manual(manual_name) - raise "Can't find any manual version for '#{manual_name}'" if manual.nil? 
- - versions, output_hash = versions_from_env(manual) + versions, output_hash = versions_from_env(manual_name) raise "unexpected mismatch" unless output_hash == parts[2] landing_template_path = $root / "backends" / "manual" / "templates" / "index.adoc.erb" erb = ERB.new(landing_template_path.read, trim_mode: "-") erb.filename = landing_template_path.to_s + manual = arch_def_for("_").manual(manual_name) + FileUtils.mkdir_p File.dirname(t.name) File.write t.name, erb.result(binding) end @@ -313,27 +326,27 @@ end rule %r{#{MANUAL_GEN_DIR}/.*/top/.*/antora/playbook/playbook.yml} => proc { |tname| parts = tname.sub("#{MANUAL_GEN_DIR}/", "").split("/") manual_name = parts[0] + versions, _ = versions_from_env(ENV["MANUAL_NAME"]) + version_files = Dir.glob($root / "arch" / "manual_version" / "**" / "*.yaml").select { |f| versions.include?(File.basename(f, ".yaml"))} FileList[ __FILE__, - ($root / "arch" / "manual" / manual_name / "#{manual_name}.yaml").to_s, + ($root / "arch" / "manual" / "#{manual_name}.yaml").to_s, ($root / "backends" / "manual" / "templates" / "playbook.yml.erb").to_s, ($root / "arch" / "manual" / manual_name / "**" / "contents.yaml").to_s - ] + ] + version_files } do |t| parts = t.name.sub("#{MANUAL_GEN_DIR}/", "").split("/") manual_name = parts[0] - arch_def = arch_def_for("_64") - manual = arch_def.manual(manual_name) - raise "Can't find any manual version for '#{manual_name}'" if manual.nil? - - versions, output_hash = versions_from_env(manual) + versions, output_hash = versions_from_env(manual_name) raise "unexpected mismatch" unless output_hash == parts[2] playbook_template_path = $root / "backends" / "manual" / "templates" / "playbook.yml.erb" erb = ERB.new(playbook_template_path.read, trim_mode: "-") erb.filename = playbook_template_path.to_s + manual = arch_def_for("_").manual(manual_name) + FileUtils.mkdir_p File.dirname(t.name) File.write t.name, erb.result(binding) end @@ -342,19 +355,25 @@ file $root / "ext" / "riscv-isa-manual" / "README.md" do sh "git submodule update --init ext/riscv-isa-manual 2>&1" end -Dir.glob($root / "arch" / "manual" / "**" / "contents.yaml") do |content_fn| - file "#{File.dirname(content_fn)}/riscv-isa-manual/README.md" => ($root / "ext" / "riscv-isa-manual" / "README.md").to_s do |t| - content_obj = YAML.load_file(content_fn) - git_dir = `git rev-parse --git-dir`.strip - cmd = [ - "git", - "--git-dir=#{git_dir}/modules/ext/riscv-isa-manual", - "worktree add", - File.dirname(t.name), - content_obj["isa_manual_tree"], - "2>&1" - ].join(" ") - sh cmd +rule %r{#{MANUAL_GEN_DIR}/[^/]+/[^/]+/riscv-isa-manual/README.md} => ["#{$root}/ext/riscv-isa-manual/README.md"] do |t| + parts = t.name.sub("#{MANUAL_GEN_DIR}/","").split("/") + manual_version_name = parts[1] + + version_paths = Dir.glob("#{$root}/arch/manual_version/**/#{manual_version_name}.yaml") + raise "No manual version named '#{manual_version_name}' found" unless version_paths.size == 1 + + version_path = version_paths[0] + + version_obj = YAML.load_file(version_path, permitted_classes: [Date]) + raise "Not an isa manual version" unless version_obj["uses_isa_manual"] == true + + FileUtils.mkdir_p File.dirname(t.name) + tree = version_obj["isa_manual_tree"] + Dir.chdir($root / "ext" / "riscv-isa-manual") do + Tempfile.create("isa-manual") do |tmpfile| + sh "git archive --format=tar -o #{tmpfile.path} #{tree}" + sh "tar xf #{tmpfile.path} -C #{File.dirname(t.name)}" + end end end @@ -377,29 +396,32 @@ namespace :gen do A static HTML website will be written into 
gen/manual/MANUAL_NAME//html DESC desc html_manual_desc - task :html_manual => "#{$root}/.stamps/arch-gen-_64.stamp" do + task :html_manual do raise ArgumentError, "Missing required environment variable MANUAL_NAME\n\n#{html_manual_desc}" if ENV["MANUAL_NAME"].nil? raise ArgumentError, "Missing required environment variable VERSIONS\n\n#{html_manual_desc}" if ENV["VERSIONS"].nil? - arch_def = arch_def_for("_64") - manual = arch_def.manuals.find { |m| m.name == ENV["MANUAL_NAME"] } - raise "No manual '#{ENV['MANUAL_NAME']}'" if manual.nil? + versions, output_hash = versions_from_env(ENV["MANUAL_NAME"]) + arch_def = arch_def_for("_") + + manual = arch_def.manual(ENV["MANUAL_NAME"]) + raise "No manual named '#{ENV['MANUAL_NAME']}" if manual.nil? - versions, output_hash = versions_from_env(manual) # check out the correct version of riscv-isa-manual, if needed versions.each do |version| - next unless version.uses_isa_manual? + version_obj = arch_def.manual_version(version) - unless ($root / "arch" / "manual" / ENV["MANUAL_NAME"] / version.name / "riscv-isa-manual").exist? - Rake::Task[$root / "arch" / "manual" / ENV["MANUAL_NAME"] / version.name / "riscv-isa-manual" / "README.md"].invoke + manual.repo_path = MANUAL_GEN_DIR / ENV["MANUAL_NAME"] / version / "riscv-isa-manual" + + if version_obj.uses_isa_manual? == true \ + && !(MANUAL_GEN_DIR / ENV["MANUAL_NAME"] / version_obj.name / "riscv-isa-manual").exist? + Rake::Task[MANUAL_GEN_DIR / ENV["MANUAL_NAME"] / version_obj.name / "riscv-isa-manual" / "README.md"].invoke end - end - # create chapter pages in antora - versions.each do |version| - antora_path = MANUAL_GEN_DIR / ENV["MANUAL_NAME"] / version.name / "antora" - version.volumes.each do |volume| + # create chapter pages in antora + + antora_path = MANUAL_GEN_DIR / ENV["MANUAL_NAME"] / version_obj.name / "antora" + version_obj.volumes.each do |volume| volume.chapters.each do |chapter| Rake::Task[antora_path / "modules" / "chapters" / "pages" / "#{chapter.name}.adoc"].invoke end @@ -408,13 +430,14 @@ namespace :gen do Rake::Task[antora_path / "modules" / "ROOT" / "pages" / "index.adoc"].invoke Rake::Task[antora_path / "antora.yml"].invoke Rake::Task[antora_path / "nav.adoc"].invoke - version.csrs.each do |csr| + + version_obj.csrs.each do |csr| Rake::Task[antora_path / "modules" / "csrs" / "pages" / "#{csr.name}.adoc"].invoke end - version.instructions.each do |inst| + version_obj.instructions.each do |inst| Rake::Task[antora_path / "modules" / "insts" / "pages" / "#{inst.name}.adoc"].invoke end - version.extensions.each do |ext| + version_obj.extensions.each do |ext| Rake::Task[antora_path / "modules" / "exts" / "pages" / "#{ext.name}.adoc"].invoke end Rake::Task[antora_path / "modules" / "params" / "pages" / "param_list.adoc"].invoke @@ -445,12 +468,13 @@ namespace :gen do end namespace :serve do - task :html_manual do |t| + desc "Serve an HTML site for one or more versions of the manual (gen:html_manual for options)" + task :html_manual do Rake::Task["gen:html_manual"].invoke port = ENV.key?("PORT") ? ENV["PORT"] : 8000 - arch_def = arch_def_for("_64") + arch_def = arch_def_for("_") manual = arch_def.manuals.find { |m| m.name == ENV["MANUAL_NAME"] } raise "No manual '#{ENV['MANUAL_NAME']}'" if manual.nil? 
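The html_manual flow above is driven entirely by two environment variables. A minimal driver sketch, run from within this repository's Rake environment (the values "isa" and "all" are illustrative assumptions; the task and variable names come from the rules above):

# Illustrative only: MANUAL_NAME must match a file arch/manual/<name>.yaml,
# and VERSIONS is either "all" or a comma-separated list of version names
# found under arch/manual_version/.
ENV["MANUAL_NAME"] = "isa"
ENV["VERSIONS"] = "all"

# versions_from_env expands "all" by scanning arch/manual_version/**/*.yaml;
# several explicit versions are combined into a Digest::SHA2 output hash
# that names the "top" output directory.
Rake::Task["gen:html_manual"].invoke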
diff --git a/backends/manual/templates/csr.adoc.erb b/backends/manual/templates/csr.adoc.erb index 23e1205a9..4589938eb 100644 --- a/backends/manual/templates/csr.adoc.erb +++ b/backends/manual/templates/csr.adoc.erb @@ -10,7 +10,7 @@ == Attributes [%autowidth] |=== -h| Defining Extension a| <%= csr.defined_by.to_asciidoc %> +h| Defining Extension a| <%= csr.defined_by_condition.to_asciidoc %> h| CSR Address | <%= "0x#{csr.address.to_s(16)}" %> <%- if csr.priv_mode == 'VS' -%> h| Virtual CSR Address | <%= "0x#{csr.virtual_address.to_s(16)}" %> diff --git a/backends/manual/templates/ext.adoc.erb b/backends/manual/templates/ext.adoc.erb index 410576a9c..4d43aac94 100644 --- a/backends/manual/templates/ext.adoc.erb +++ b/backends/manual/templates/ext.adoc.erb @@ -5,7 +5,7 @@ == Versions <%- ext.versions.each do |v| -%> -<%= v.version %>:: +<%= v.canonical_version %>:: State::: <%= v.state %> <%- if v.state == "ratified" -%> @@ -36,7 +36,7 @@ <%= ext.description %> -<%- insts = arch_def.instructions.select { |i| ext.versions.any? { |v| i.defined_by?(ext.name, v.version) } } -%> +<%- insts = arch_def.instructions.select { |i| ext.versions.any? { |v| i.defined_by?(v) } } -%> <%- unless insts.empty? -%> == Instructions diff --git a/backends/manual/templates/index.adoc.erb b/backends/manual/templates/index.adoc.erb index b57680656..b555aae86 100644 --- a/backends/manual/templates/index.adoc.erb +++ b/backends/manual/templates/index.adoc.erb @@ -4,8 +4,8 @@ The following versions of the manual can be found here: -<%- versions.each do |version| -%> -* xref:<%= version.marketing_version %>@<%= manual.name %>:ROOT:index.adoc[Version <%= version.marketing_version %>] +<%- manual.versions.each do |version| -%> +* xref:<%= version.name %>@<%= manual.name %>:ROOT:index.adoc[Version <%= version.marketing_version %>] <%- end -%> This site was generated using https://github.com/riscv-software-src/riscv-unified-db[riscv-unified-db], diff --git a/backends/manual/templates/instruction.adoc.erb b/backends/manual/templates/instruction.adoc.erb index 32d9af233..877df6383 100644 --- a/backends/manual/templates/instruction.adoc.erb +++ b/backends/manual/templates/instruction.adoc.erb @@ -7,18 +7,18 @@ This instruction is defined by: -<%= inst.defined_by.to_asciidoc %> +<%= inst.defined_by_condition.to_asciidoc %> This instruction is included in the following profiles: <%- arch_def.profiles.each do |profile| -%> <%- in_profile_mandatory = profile.mandatory_ext_reqs.any? do |ext_req| - ext_versions = ext_req.satisfying_versions(arch_def) + ext_versions = ext_req.satisfying_versions ext_versions.any? { |ext_ver| inst.defined_by?(ext_ver) } end in_profile_optional = profile.optional_ext_reqs.any? do |ext_req| - ext_versions = ext_req.satisfying_versions(arch_def) + ext_versions = ext_req.satisfying_versions ext_versions.any? 
{ |ext_ver| inst.defined_by?(ext_ver) } end if in_profile_mandatory diff --git a/backends/manual/templates/playbook.yml.erb b/backends/manual/templates/playbook.yml.erb index fddeaf637..269d3377e 100644 --- a/backends/manual/templates/playbook.yml.erb +++ b/backends/manual/templates/playbook.yml.erb @@ -6,7 +6,7 @@ content: sources: - url: <%= $root %> start_path: gen/manual/<%= manual.name %>/top/<%= output_hash %>/antora/landing - <%- versions.each do |version| -%> + <%- manual.versions.each do |version| -%> - url: <%= $root %> start_path: gen/manual/<%= manual.name %>/<%= version.name %>/antora <%- end -%> diff --git a/backends/profile_doc/templates/profile.adoc.erb b/backends/profile_doc/templates/profile.adoc.erb index 211975837..7e823dbb1 100644 --- a/backends/profile_doc/templates/profile.adoc.erb +++ b/backends/profile_doc/templates/profile.adoc.erb @@ -511,7 +511,7 @@ This extension has the following implementation options: This instruction is defined by: -<%= inst.defined_by.to_asciidoc %> +<%= inst.defined_by_condition.to_asciidoc %> ==== Encoding @@ -660,7 +660,7 @@ h| CSR Address | <%= "0x#{csr.address.to_s(16)}" %> <% if csr.priv_mode == 'VS' -%> h| Virtual CSR Address | <%= "0x#{csr.virtual_address.to_s(16)}" %> <% end -%> -h| Defining extension a| <%= csr.defined_by.to_asciidoc %> +h| Defining extension a| <%= csr.defined_by_condition.to_asciidoc %> <% if csr.dynamic_length?(arch_def) -%> h| Length | <%= csr.length_pretty(arch_def) %> <% else -%> diff --git a/bin/.container-tag b/bin/.container-tag index be5863417..bd73f4707 100644 --- a/bin/.container-tag +++ b/bin/.container-tag @@ -1 +1 @@ -0.3 +0.4 diff --git a/bin/setup b/bin/setup index 2ec418816..4b36158a1 100755 --- a/bin/setup +++ b/bin/setup @@ -15,7 +15,7 @@ if [ -v GITHUB_ACTIONS ]; then else CONTAINER_PATH=${ROOT}/.singularity/image-$CONTAINER_TAG.sif HOME_PATH=${HOME} - HOME_OPT="--bind ${ROOT}/.home:${HOME_PATH}" + HOME_OPT="--bind ${ROOT}/.home:${HOME_PATH} --bind /local/mnt/workspace/.vscode-server:/local/mnt/workspace/.vscode-server" SINGULARITY_CACHE= fi @@ -91,7 +91,7 @@ if [ -v DEVCONTAINER_ENV ]; then NPM="npm" NPX="npx" NODE="node" - PYTHON="python3" + PYTHON="${ROOT}/.home/.venv/bin/python3" PIP="${ROOT}/.home/.venv/bin/pip" BASH="bash" else @@ -101,7 +101,7 @@ else NPM="singularity run ${HOME_OPT} ${CONTAINER_PATH} npm" NPX="singularity run ${HOME_OPT} ${CONTAINER_PATH} npx" NODE="singularity run ${HOME_OPT} ${CONTAINER_PATH} node" - PYTHON="singularity run ${HOME_OPT} ${CONTAINER_PATH} python3" + PYTHON="singularity run ${HOME_OPT} ${CONTAINER_PATH} ${ROOT}/.home/.venv/bin/python3" PIP="singularity run ${HOME_OPT} ${CONTAINER_PATH} ${ROOT}/.home/.venv/bin/pip" BASH="singularity run ${HOME_OPT} ${CONTAINER_PATH} bash" fi diff --git a/cfgs/_/cfg.yaml b/cfgs/_/cfg.yaml new file mode 100644 index 000000000..b5e409334 --- /dev/null +++ b/cfgs/_/cfg.yaml @@ -0,0 +1,9 @@ +# yaml-language-server: $schema=../../schemas/config_schema.json +--- + +$schema: config_schema.json# +kind: architecture configuration +type: unconfigured +name: _ +description: | + A completely unconfigured RVI-standard architecture; not even MXLEN is known. 
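With the new unconfigured "_" config in place, the backends above resolve the database through it rather than through the removed _32/_64 stamps. A hypothetical query sketch using the same helpers the rake rules call (the extension name "B" is only an example):

# Hypothetical usage sketch (names as used in the rakefiles above):
arch_def = arch_def_for("_")                  # presumably backed by cfgs/_/cfg.yaml
ext = arch_def.extension("B")                 # look up an extension by name, as the appendix rules do
raise "unknown extension" if ext.nil?
ext.versions.each { |v| puts v.version_str }  # version_str, as used in the ERB templates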
diff --git a/cfgs/_32/cfg.yaml b/cfgs/_32/cfg.yaml deleted file mode 100644 index ca0f2d33d..000000000 --- a/cfgs/_32/cfg.yaml +++ /dev/null @@ -1,2 +0,0 @@ ---- -type: partially configured diff --git a/cfgs/_32/implemented_exts.yaml b/cfgs/_32/implemented_exts.yaml deleted file mode 100644 index 11151c245..000000000 --- a/cfgs/_32/implemented_exts.yaml +++ /dev/null @@ -1 +0,0 @@ -implemented_extensions: [] \ No newline at end of file diff --git a/cfgs/_32/params.yaml b/cfgs/_32/params.yaml deleted file mode 100644 index b4f8fb3d4..000000000 --- a/cfgs/_32/params.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -params: - NAME: _32 - - XLEN: 32 diff --git a/cfgs/_64/cfg.yaml b/cfgs/_64/cfg.yaml deleted file mode 100644 index ca0f2d33d..000000000 --- a/cfgs/_64/cfg.yaml +++ /dev/null @@ -1,2 +0,0 @@ ---- -type: partially configured diff --git a/cfgs/_64/implemented_exts.yaml b/cfgs/_64/implemented_exts.yaml deleted file mode 100644 index 11151c245..000000000 --- a/cfgs/_64/implemented_exts.yaml +++ /dev/null @@ -1 +0,0 @@ -implemented_extensions: [] \ No newline at end of file diff --git a/cfgs/_64/params.yaml b/cfgs/_64/params.yaml deleted file mode 100644 index b4ff65b89..000000000 --- a/cfgs/_64/params.yaml +++ /dev/null @@ -1,4 +0,0 @@ -params: - NAME: _64 - - XLEN: 64 diff --git a/cfgs/generic_rv64/cfg.yaml b/cfgs/generic_rv64/cfg.yaml index c9fde9219..0a300fccd 100644 --- a/cfgs/generic_rv64/cfg.yaml +++ b/cfgs/generic_rv64/cfg.yaml @@ -1,2 +1,556 @@ ---- +$schema: config_schema.json# +kind: architecture configuration type: fully configured +name: generic_rv64 +description: An example fully-specified RV64 system +implemented_extensions: + - [A, "2.1.0"] + - [B, "1.0.0"] + - [C, "2.0.0"] + - [D, "2.2.0"] + - [F, "2.2.0"] + - [I, "2.1.0"] + - [H, "1.0.0"] + - [M, "2.0.0"] + - [S, "1.12.0"] + - [Sm, "1.12.0"] + - [Smhpm, "1.12.0"] + - [Smpmp, "1.12.0"] + - [U, "1.12.0"] + - [V, "1.0.0"] + - [Zicntr, "2.0.0"] + - [Zicsr, "2.0.0"] + - [Zihpm, "2.0.0"] + - [Smaia, "1.0.0"] + - [Smcdeleg, "1.0.0"] + - [Smcntrpmf, "1.0.0"] + - [Sscofpmf, "1.0.0"] + - [Ssaia, "1.0.0"] + - [Ssccfg, "1.0.0"] + - [Sstc, "0.9.0"] + - [Sv39, "1.12.0"] + - [Sv48, "1.12.0"] + - [Zicboz, "1.0.0"] + - [Zicbom, "1.0.0"] + +params: + XLEN: 64 + + # name of the configuration + NAME: generic_rv64 + + # vendor-specific architecture ID in marchid + ARCH_ID: 0x1000000000000000 + + # vendor-specific implementation ID in mimpid + IMP_ID: 0x0 + + # JEDEC Vendor ID bank + VENDOR_ID_BANK: 0x0 + + # JEDEC Vendor ID offset + VENDOR_ID_OFFSET: 0x0 + + # whether or not the implementation supports misaligned loads and stores in main memory (not including atomics) + # must be true when Zicclsm is supported + MISALIGNED_LDST: true + + MISALIGNED_LDST_EXCEPTION_PRIORITY: high + + MISALIGNED_MAX_ATOMICITY_GRANULE_SIZE: 0 + + MISALIGNED_SPLIT_STRATEGY: by_byte + + # whether or not the implementation supports misaligned atomics + MISALIGNED_AMO: false + + HPM_COUNTER_EN: + - false # CY + - false # empty + - false # IR + - true # HPM3 + - true # HPM4 + - true # HPM5 + - true # HPM6 + - true # HPM7 + - true # HPM8 + - true # HPM9 + - true # HPM10 + - false # HPM11 + - false # HPM12 + - false # HPM13 + - false # HPM14 + - false # HPM15 + - false # HPM16 + - false # HPM17 + - false # HPM18 + - false # HPM19 + - false # HPM20 + - false # HPM21 + - false # HPM22 + - false # HPM23 + - false # HPM24 + - false # HPM25 + - false # HPM26 + - false # HPM27 + - false # HPM28 + - false # HPM29 + - false # HPM30 + - false # HPM31 + + # list of defined HPM events + 
HPM_EVENTS: + - 0 + - 3 + + # Indicates which counters can be disabled from mcountinhibit + # + # An unimplemented counter cannot be specified, i.e., if + # NUM_HPM_COUNTERS == 8, it would be illegal to add index + # 11 in COUNTINHIBIT_EN since the highest implemented counter + # would be at bit 10 + COUNTINHIBIT_EN: + - true # CY + - false # empty + - true # IR + - true # HPM3 + - true # HPM4 + - true # HPM5 + - true # HPM6 + - true # HPM7 + - true # HPM8 + - true # HPM9 + - true # HPM10 + - false # HPM11 + - false # HPM12 + - false # HPM13 + - false # HPM14 + - false # HPM15 + - false # HPM16 + - false # HPM17 + - false # HPM18 + - false # HPM19 + - false # HPM20 + - false # HPM21 + - false # HPM22 + - false # HPM23 + - false # HPM24 + - false # HPM25 + - false # HPM26 + - false # HPM27 + - false # HPM28 + - false # HPM29 + - false # HPM30 + - false # HPM31 + + # Indicates which counters can delegated via mcounteren + # + # An unimplemented counter cannot be specified, i.e., if + # NUM_HPM_COUNTERS == 8, it would be illegal to add index + # 11 in COUNTEN_EN since the highest implemented counter + # would be at bit 10 + MCOUNTENABLE_EN: + - true # CY + - false # TM + - true # IR + - true # HPM3 + - true # HPM4 + - true # HPM5 + - true # HPM6 + - true # HPM7 + - true # HPM8 + - true # HPM9 + - true # HPM10 + - false # HPM11 + - false # HPM12 + - false # HPM13 + - false # HPM14 + - false # HPM15 + - false # HPM16 + - false # HPM17 + - false # HPM18 + - false # HPM19 + - false # HPM20 + - false # HPM21 + - false # HPM22 + - false # HPM23 + - false # HPM24 + - false # HPM25 + - false # HPM26 + - false # HPM27 + - false # HPM28 + - false # HPM29 + - false # HPM30 + - false # HPM31 + + # Indicates which counters can delegated via scounteren + # + # An unimplemented counter cannot be specified, i.e., if + # NUM_HPM_COUNTERS == 8, it would be illegal to add index + # 11 in COUNTEN_EN since the highest implemented counter + # would be at bit 10 + SCOUNTENABLE_EN: + - true # CY + - false # TM + - true # IR + - true # HPM3 + - true # HPM4 + - true # HPM5 + - true # HPM6 + - true # HPM7 + - true # HPM8 + - true # HPM9 + - true # HPM10 + - false # HPM11 + - false # HPM12 + - false # HPM13 + - false # HPM14 + - false # HPM15 + - false # HPM16 + - false # HPM17 + - false # HPM18 + - false # HPM19 + - false # HPM20 + - false # HPM21 + - false # HPM22 + - false # HPM23 + - false # HPM24 + - false # HPM25 + - false # HPM26 + - false # HPM27 + - false # HPM28 + - false # HPM29 + - false # HPM30 + - false # HPM31 + + # Indicates which counters can delegated via hcounteren + # + # An unimplemented counter cannot be specified, i.e., if + # NUM_HPM_COUNTERS == 8, it would be illegal to add index + # 11 in COUNTEN_EN since the highest implemented counter + # would be at bit 10 + HCOUNTENABLE_EN: + - true # CY + - false # TM + - true # IR + - true # HPM3 + - true # HPM4 + - true # HPM5 + - true # HPM6 + - true # HPM7 + - true # HPM8 + - true # HPM9 + - true # HPM10 + - false # HPM11 + - false # HPM12 + - false # HPM13 + - false # HPM14 + - false # HPM15 + - false # HPM16 + - false # HPM17 + - false # HPM18 + - false # HPM19 + - false # HPM20 + - false # HPM21 + - false # HPM22 + - false # HPM23 + - false # HPM24 + - false # HPM25 + - false # HPM26 + - false # HPM27 + - false # HPM28 + - false # HPM29 + - false # HPM30 + - false # HPM31 + + # when true, writing an illegal value to a WLRL CSR field raises an Illegal Instruction exception + # when false, writing an illegal value to a WLRL CSR field is ignored + 
TRAP_ON_ILLEGAL_WLRL: true + TRAP_ON_UNIMPLEMENTED_INSTRUCTION: true + TRAP_ON_RESERVED_INSTRUCTION: true + TRAP_ON_UNIMPLEMENTED_CSR: true + + # Whether or not a real hardware `time` CSR exists. Implementations can either provide a real + # CSR or trap and emulate access at M-mode. + TIME_CSR_IMPLEMENTED: true + + # Whether or not the `misa` CSR returns zero or a non-zero value. + MISA_CSR_IMPLEMENTED: true + + # when true, *tval is written with the virtual PC of the EBREAK instruction (same information as *epc) + # when false, *tval is written with 0 on an EBREAK instruction + # + # regardless, *tval is always written with a virtual PC when an external breakpoint is generated + REPORT_VA_IN_MTVAL_ON_BREAKPOINT: true + + REPORT_VA_IN_MTVAL_ON_LOAD_MISALIGNED: true + REPORT_VA_IN_MTVAL_ON_STORE_AMO_MISALIGNED: true + REPORT_VA_IN_MTVAL_ON_INSTRUCTION_MISALIGNED: true + REPORT_VA_IN_MTVAL_ON_LOAD_ACCESS_FAULT: true + REPORT_VA_IN_MTVAL_ON_STORE_AMO_ACCESS_FAULT: true + REPORT_VA_IN_MTVAL_ON_INSTRUCTION_ACCESS_FAULT: true + REPORT_VA_IN_MTVAL_ON_LOAD_PAGE_FAULT: true + REPORT_VA_IN_MTVAL_ON_STORE_AMO_PAGE_FAULT: true + REPORT_VA_IN_MTVAL_ON_INSTRUCTION_PAGE_FAULT: true + REPORT_ENCODING_IN_MTVAL_ON_ILLEGAL_INSTRUCTION: true + # REPORT_CAUSE_IN_MTVAL_ON_SOFTWARE_CHECK: true + MTVAL_WIDTH: 64 # must check that this can hold any valid VA if any REPORT_VA* or Sdext, and, if REPORT_ENCODING*, at least [MXLEN, ILEN].min bits + + REPORT_VA_IN_STVAL_ON_BREAKPOINT: true + REPORT_VA_IN_STVAL_ON_LOAD_MISALIGNED: true + REPORT_VA_IN_STVAL_ON_STORE_AMO_MISALIGNED: true + REPORT_VA_IN_STVAL_ON_INSTRUCTION_MISALIGNED: true + REPORT_VA_IN_STVAL_ON_LOAD_ACCESS_FAULT: true + REPORT_VA_IN_STVAL_ON_STORE_AMO_ACCESS_FAULT: true + REPORT_VA_IN_STVAL_ON_INSTRUCTION_ACCESS_FAULT: true + REPORT_VA_IN_STVAL_ON_LOAD_PAGE_FAULT: true + REPORT_VA_IN_STVAL_ON_STORE_AMO_PAGE_FAULT: true + REPORT_VA_IN_STVAL_ON_INSTRUCTION_PAGE_FAULT: true + REPORT_ENCODING_IN_STVAL_ON_ILLEGAL_INSTRUCTION: true + # REPORT_CAUSE_IN_STVAL_ON_SOFTWARE_CHECK: true + STVAL_WIDTH: 64 # must check that this can hold any valid VA, and, if REPORT_ENCODING*, at least [SXLEN, ILEN].min bits + + REPORT_VA_IN_VSTVAL_ON_BREAKPOINT: true + REPORT_VA_IN_VSTVAL_ON_LOAD_MISALIGNED: true + REPORT_VA_IN_VSTVAL_ON_STORE_AMO_MISALIGNED: true + REPORT_VA_IN_VSTVAL_ON_INSTRUCTION_MISALIGNED: true + REPORT_VA_IN_VSTVAL_ON_LOAD_ACCESS_FAULT: true + REPORT_VA_IN_VSTVAL_ON_STORE_AMO_ACCESS_FAULT: true + REPORT_VA_IN_VSTVAL_ON_INSTRUCTION_ACCESS_FAULT: true + REPORT_VA_IN_VSTVAL_ON_LOAD_PAGE_FAULT: true + REPORT_VA_IN_VSTVAL_ON_STORE_AMO_PAGE_FAULT: true + REPORT_VA_IN_VSTVAL_ON_INSTRUCTION_PAGE_FAULT: true + REPORT_ENCODING_IN_VSTVAL_ON_ILLEGAL_INSTRUCTION: true + # REPORT_CAUSE_IN_VSTVAL_ON_SOFTWARE_CHECK: true + # VSTVAL_WIDTH not needed; "vstval is a WARL register that must be able to hold the same set of values that stval can hold" + + + # address of the unified discovery configuration data structure + # this address is reported in the mconfigptr CSR + CONFIG_PTR_ADDRESS: 0x1000 + + # number of implemented PMP entries. Can be any value between 0-64, inclusive. + # + # the number of implemented PMP registers must be 0, 16, or 64. 
+  #
+  # Therefore, whether or not a pmpaddrN or pmpcfgN register exists depends on
+  # NUM_PMP_ENTRIES as follows:
+  # |===
+  # | NUM_PMP_ENTRIES | pmpaddr<0-15> / pmpcfg<0-3> | pmpaddr<16-63> / pmpcfg<4-15>
+  # | 0 | N | N
+  # | 1-16 | Y | N
+  # | 17-64 | Y | Y
+  # |===
+  # ** pmpcfgN for an odd N never exists when XLEN == 64
+  #
+  # when NUM_PMP_ENTRIES is not exactly 0, 16, or 64, some extant pmp registers,
+  # and associated pmpNcfg, will be read-only zero (but will not cause an exception).
+  NUM_PMP_ENTRIES: 14
+
+  # log2 of the smallest supported PMP region
+  # generally, for systems with an MMU, should not be smaller than 12,
+  # as that would preclude caching PMP results in the TLB along with
+  # virtual memory translations
+  #
+  # Note that PMP_GRANULARITY is equal to G+2 (not G) as described in
+  # the privileged architecture
+  PMP_GRANULARITY: 12
+
+  # log2 of the smallest supported PMA region
+  # generally, for systems with an MMU, should not be smaller than 12,
+  # as that would preclude caching PMP results in the TLB along with
+  # virtual memory translations
+  PMA_GRANULARITY: 12
+
+  # number of bits in the physical address space
+  PHYS_ADDR_WIDTH: 56
+
+  # number of implemented ASID bits
+  # maximum value is 16
+  ASID_WIDTH: 12
+
+  # when the A extension is supported, indicates whether or not
+  # the extension can be disabled in the `misa.A` bit.
+  MUTABLE_MISA_A: false
+
+  # when the B extension is supported, indicates whether or not
+  # the extension can be disabled in the `misa.B` bit.
+  MUTABLE_MISA_B: false
+
+  # when the C extension is supported, indicates whether or not
+  # the extension can be disabled in the `misa.C` bit.
+  MUTABLE_MISA_C: false
+
+  # when the D extension is supported, indicates whether or not
+  # the extension can be disabled in the `misa.D` bit.
+  MUTABLE_MISA_D: false
+
+  # when the F extension is supported, indicates whether or not
+  # the extension can be disabled in the `misa.F` bit.
+  MUTABLE_MISA_F: false
+
+  # when the H extension is supported, indicates whether or not
+  # the extension can be disabled in the `misa.H` bit.
+  MUTABLE_MISA_H: false
+
+  # when the M extension is supported, indicates whether or not
+  # the extension can be disabled in the `misa.M` bit.
+  MUTABLE_MISA_M: false
+
+  # when the S extension is supported, indicates whether or not
+  # the extension can be disabled in the `misa.S` bit.
+  MUTABLE_MISA_S: false
+
+  # when the U extension is supported, indicates whether or not
+  # the extension can be disabled in the `misa.U` bit.
+  MUTABLE_MISA_U: false
+
+  # when the V extension is supported, indicates whether or not
+  # the extension can be disabled in the `misa.V` bit.
+  MUTABLE_MISA_V: false
+
+  # size of a cache block, in bytes
+  CACHE_BLOCK_SIZE: 64
+
+  # number of supported virtualized guest interrupts
+  # corresponds to the `GEILEN` parameter in the RVI specs
+  NUM_EXTERNAL_GUEST_INTERRUPTS: 4
+
+  # Endianness of data in M-mode. Can be one of:
+  #
+  # * little: M-mode data is always little endian
+  # * big: M-mode data is always big endian
+  # * dynamic: M-mode data can be either little or big endian, depending on the RW CSR field mstatus.MBE
+  M_MODE_ENDIANESS: little
+
+  # Endianness of data in S-mode. Can be one of:
+  #
+  # * little: S-mode data is always little endian
+  # * big: S-mode data is always big endian
+  # * dynamic: S-mode data can be either little or big endian, depending on the RW CSR field mstatus.SBE
+  S_MODE_ENDIANESS: little
+
+  # Endianness of data in U-mode. Can be one of:
+  #
+  # * little: U-mode data is always little endian
+  # * big: U-mode data is always big endian
+  # * dynamic: U-mode data can be either little or big endian, depending on the RW CSR field mstatus.UBE
+  U_MODE_ENDIANESS: little
+
+  # Endianness of data in VU-mode. Can be one of:
+  #
+  # * little: VU-mode data is always little endian
+  # * big: VU-mode data is always big endian
+  # * dynamic: VU-mode data can be either little or big endian, depending on the RW CSR field vsstatus.UBE
+  VU_MODE_ENDIANESS: little
+
+  # Endianness of data in VS-mode. Can be one of:
+  #
+  # * little: VS-mode data is always little endian
+  # * big: VS-mode data is always big endian
+  # * dynamic: VS-mode data can be either little or big endian, depending on the RW CSR field hstatus.VSBE
+  VS_MODE_ENDIANESS: little
+
+  # XLENs supported in S-mode. Can be one of:
+  #
+  # * 32: SXLEN is always 32
+  # * 64: SXLEN is always 64
+  # * 3264: SXLEN can be changed (via mstatus.SXL) between 32 and 64
+  SXLEN: 64
+
+  # XLENs supported in U-mode. Can be one of:
+  #
+  # * 32: UXLEN is always 32
+  # * 64: UXLEN is always 64
+  # * 3264: UXLEN can be changed (via mstatus.UXL) between 32 and 64
+  UXLEN: 64
+
+  # XLENs supported in VS-mode. Can be one of:
+  #
+  # * 32: VSXLEN is always 32
+  # * 64: VSXLEN is always 64
+  # * 3264: VSXLEN can be changed (via hstatus.VSXL) between 32 and 64
+  VSXLEN: 64
+
+  # XLENs supported in VU-mode. Can be one of:
+  #
+  # * 32: VUXLEN is always 32
+  # * 64: VUXLEN is always 64
+  # * 3264: VUXLEN can be changed (via vsstatus.UXL) between 32 and 64
+  VUXLEN: 64
+
+  # Strategy used to handle reservation sets
+  #
+  # * "reserve naturally-aligned 64-byte region": Always reserve the 64-byte block containing the LR/SC address
+  # * "reserve naturally-aligned 128-byte region": Always reserve the 128-byte block containing the LR/SC address
+  # * "reserve exactly enough to cover the access": Always reserve exactly the LR/SC access, and no more
+  # * "custom": Custom behavior, leading to an 'unpredictable' call on any LR/SC
+  LRSC_RESERVATION_STRATEGY: reserve naturally-aligned 64-byte region
+
+  # whether or not an SC will fail if its VA does not match the VA of the prior LR,
+  # even if the physical address of the SC and LR are the same
+  LRSC_FAIL_ON_VA_SYNONYM: false
+
+  # what to do when an LR/SC address is misaligned:
+  #
+  # * 'always raise misaligned exception': self-explanatory
+  # * 'always raise access fault': self-explanatory
+  # * 'custom': Custom behavior; misaligned LR/SC may sometimes raise a misaligned exception and sometimes raise an access fault. Will lead to an 'unpredictable' call on any misaligned LR/SC access
+  LRSC_MISALIGNED_BEHAVIOR: always raise misaligned exception
+
+  # whether or not a Store Conditional fails if its physical address and size do not
+  # exactly match the physical address and size of the last Load Reserved in program order
+  # (independent of whether or not the SC is in the current reservation set)
+  LRSC_FAIL_ON_NON_EXACT_LRSC: false
+
+  # Whether writes from M-mode, U-mode, or S-mode to vsatp with an illegal mode setting are
+  # ignored (as they are with satp), or if they are treated as WARL, leading to unpredictable
+  # behavior.
+ IGNORE_INVALID_VSATP_MODE_WRITES_WHEN_V_EQ_ZERO: true + + GSTAGE_MODE_BARE: true + SV32_VSMODE_TRANSLATION: false + SV39_VSMODE_TRANSLATION: true + SV48_VSMODE_TRANSLATION: true + SV57_VSMODE_TRANSLATION: true + SV32X4_TRANSLATION: false + SV39X4_TRANSLATION: true + SV48X4_TRANSLATION: true + SV57X4_TRANSLATION: false + VMID_WIDTH: 8 + SV_MODE_BARE: true + STVEC_MODE_DIRECT: true + STVEC_MODE_VECTORED: true + SATP_MODE_BARE: true + REPORT_GPA_IN_TVAL_ON_LOAD_GUEST_PAGE_FAULT: true + REPORT_GPA_IN_TVAL_ON_STORE_AMO_GUEST_PAGE_FAULT: true + REPORT_GPA_IN_TVAL_ON_INSTRUCTION_GUEST_PAGE_FAULT: true + REPORT_GPA_IN_TVAL_ON_INTERMEDIATE_GUEST_PAGE_FAULT: true + TINST_VALUE_ON_FINAL_LOAD_GUEST_PAGE_FAULT: "always transformed standard instruction" + TINST_VALUE_ON_FINAL_STORE_AMO_GUEST_PAGE_FAULT: "always transformed standard instruction" + TINST_VALUE_ON_FINAL_INSTRUCTION_GUEST_PAGE_FAULT: "always zero" + TINST_VALUE_ON_INSTRUCTION_ADDRESS_MISALIGNED: "always zero" + TINST_VALUE_ON_BREAKPOINT: "always zero" + TINST_VALUE_ON_VIRTUAL_INSTRUCTION: "always zero" + TINST_VALUE_ON_LOAD_ADDRESS_MISALIGNED: "always zero" + TINST_VALUE_ON_LOAD_ACCESS_FAULT: "always zero" + TINST_VALUE_ON_STORE_AMO_ADDRESS_MISALIGNED: "always zero" + TINST_VALUE_ON_STORE_AMO_ACCESS_FAULT: "always zero" + TINST_VALUE_ON_UCALL: "always zero" + TINST_VALUE_ON_SCALL: "always zero" + TINST_VALUE_ON_MCALL: "always zero" + TINST_VALUE_ON_VSCALL: "always zero" + TINST_VALUE_ON_LOAD_PAGE_FAULT: "always zero" + TINST_VALUE_ON_STORE_AMO_PAGE_FAULT: "always zero" + MTVEC_MODES: [0, 1] + MSTATUS_FS_LEGAL_VALUES: [0,1,2,3] + MSTATUS_FS_WRITEABLE: true + MSTATUS_TVM_IMPLEMENTED: true + HW_MSTATUS_FS_DIRTY_UPDATE: precise + MSTATUS_VS_WRITEABLE: true + MSTATUS_VS_LEGAL_VALUES: [0,1,2,3] + HW_MSTATUS_VS_DIRTY_UPDATE: precise + FORCE_UPGRADE_CBO_INVAL_TO_FLUSH: true + REPORT_GPA_IN_HTVAL_ON_GUEST_PAGE_FAULT: true + VSTVEC_MODE_DIRECT: true + VSTVEC_MODE_VECTORED: true diff --git a/cfgs/generic_rv64/implemented_exts.yaml b/cfgs/generic_rv64/implemented_exts.yaml deleted file mode 100644 index 9a0b8a2cd..000000000 --- a/cfgs/generic_rv64/implemented_exts.yaml +++ /dev/null @@ -1,31 +0,0 @@ -# $schema=../../schemas/implemented_exts_schema.json - -implemented_extensions: - - [A, "2.1.0"] - - [B, "1.0.0"] - - [C, "2.2.0"] - - [D, "2.2.0"] - - [F, "2.2.0"] - - [I, "2.1.0"] - - [H, "1.0.0"] - - [M, "2.0.0"] - - [S, "1.12.0"] - - [Sm, "1.12.0"] - - [Smhpm, "1.12.0"] - - [Smpmp, "1.12.0"] - - [U, "1.12.0"] - - [V, "1.0.0"] - - [Zicntr, "2.0.0"] - - [Zicsr, "2.0.0"] - - [Zihpm, "2.0.0"] - - [Smaia, "1.0.0"] - - [Smcdeleg, "1.0.0"] - - [Smcntrpmf, "1.0.0"] - - [Sscofpmf, "1.0.0"] - - [Ssaia, "1.0.0"] - - [Ssccfg, "1.0.0"] - - [Sstc, "0.9.0"] - - [Sv39, "1.12.0"] - - [Sv48, "1.12.0"] - - [Zicboz, "1.0.1-b34ea8a"] - - [Zicbom, "1.0.1-b34ea8a"] diff --git a/cfgs/generic_rv64/params.yaml b/cfgs/generic_rv64/params.yaml deleted file mode 100644 index 4ed77881a..000000000 --- a/cfgs/generic_rv64/params.yaml +++ /dev/null @@ -1,520 +0,0 @@ - ---- -params: - XLEN: 64 - - # name of the configuration - NAME: generic_rv64 - - # vendor-specific architecture ID in marchid - ARCH_ID: 0x1000000000000000 - - # vendor-specific implementation ID in mimpid - IMP_ID: 0x0 - - # JEDEC Vendor ID bank - VENDOR_ID_BANK: 0x0 - - # JEDEC Vendor ID offset - VENDOR_ID_OFFSET: 0x0 - - # whether or not the implementation supports misaligned loads and stores in main memory (not including atomics) - # must be true when Zicclsm is supported - MISALIGNED_LDST: true - - 
MISALIGNED_LDST_EXCEPTION_PRIORITY: high - - MISALIGNED_MAX_ATOMICITY_GRANULE_SIZE: 0 - - MISALIGNED_SPLIT_STRATEGY: by_byte - - # whether or not the implementation supports misaligned atomics - MISALIGNED_AMO: false - - HPM_COUNTER_EN: - - false # CY - - false # empty - - false # IR - - true # HPM3 - - true # HPM4 - - true # HPM5 - - true # HPM6 - - true # HPM7 - - true # HPM8 - - true # HPM9 - - true # HPM10 - - false # HPM11 - - false # HPM12 - - false # HPM13 - - false # HPM14 - - false # HPM15 - - false # HPM16 - - false # HPM17 - - false # HPM18 - - false # HPM19 - - false # HPM20 - - false # HPM21 - - false # HPM22 - - false # HPM23 - - false # HPM24 - - false # HPM25 - - false # HPM26 - - false # HPM27 - - false # HPM28 - - false # HPM29 - - false # HPM30 - - false # HPM31 - - # list of defined HPM events - HPM_EVENTS: - - 0 - - 3 - - # Indicates which counters can be disabled from mcountinhibit - # - # An unimplemented counter cannot be specified, i.e., if - # NUM_HPM_COUNTERS == 8, it would be illegal to add index - # 11 in COUNTINHIBIT_EN since the highest implemented counter - # would be at bit 10 - COUNTINHIBIT_EN: - - true # CY - - false # empty - - true # IR - - true # HPM3 - - true # HPM4 - - true # HPM5 - - true # HPM6 - - true # HPM7 - - true # HPM8 - - true # HPM9 - - true # HPM10 - - false # HPM11 - - false # HPM12 - - false # HPM13 - - false # HPM14 - - false # HPM15 - - false # HPM16 - - false # HPM17 - - false # HPM18 - - false # HPM19 - - false # HPM20 - - false # HPM21 - - false # HPM22 - - false # HPM23 - - false # HPM24 - - false # HPM25 - - false # HPM26 - - false # HPM27 - - false # HPM28 - - false # HPM29 - - false # HPM30 - - false # HPM31 - - # Indicates which counters can delegated via mcounteren - # - # An unimplemented counter cannot be specified, i.e., if - # NUM_HPM_COUNTERS == 8, it would be illegal to add index - # 11 in COUNTEN_EN since the highest implemented counter - # would be at bit 10 - MCOUNTENABLE_EN: - - true # CY - - false # TM - - true # IR - - true # HPM3 - - true # HPM4 - - true # HPM5 - - true # HPM6 - - true # HPM7 - - true # HPM8 - - true # HPM9 - - true # HPM10 - - false # HPM11 - - false # HPM12 - - false # HPM13 - - false # HPM14 - - false # HPM15 - - false # HPM16 - - false # HPM17 - - false # HPM18 - - false # HPM19 - - false # HPM20 - - false # HPM21 - - false # HPM22 - - false # HPM23 - - false # HPM24 - - false # HPM25 - - false # HPM26 - - false # HPM27 - - false # HPM28 - - false # HPM29 - - false # HPM30 - - false # HPM31 - - # Indicates which counters can delegated via scounteren - # - # An unimplemented counter cannot be specified, i.e., if - # NUM_HPM_COUNTERS == 8, it would be illegal to add index - # 11 in COUNTEN_EN since the highest implemented counter - # would be at bit 10 - SCOUNTENABLE_EN: - - true # CY - - false # TM - - true # IR - - true # HPM3 - - true # HPM4 - - true # HPM5 - - true # HPM6 - - true # HPM7 - - true # HPM8 - - true # HPM9 - - true # HPM10 - - false # HPM11 - - false # HPM12 - - false # HPM13 - - false # HPM14 - - false # HPM15 - - false # HPM16 - - false # HPM17 - - false # HPM18 - - false # HPM19 - - false # HPM20 - - false # HPM21 - - false # HPM22 - - false # HPM23 - - false # HPM24 - - false # HPM25 - - false # HPM26 - - false # HPM27 - - false # HPM28 - - false # HPM29 - - false # HPM30 - - false # HPM31 - - # Indicates which counters can delegated via hcounteren - # - # An unimplemented counter cannot be specified, i.e., if - # NUM_HPM_COUNTERS == 8, it would be illegal to add index - # 
11 in COUNTEN_EN since the highest implemented counter - # would be at bit 10 - HCOUNTENABLE_EN: - - true # CY - - false # TM - - true # IR - - true # HPM3 - - true # HPM4 - - true # HPM5 - - true # HPM6 - - true # HPM7 - - true # HPM8 - - true # HPM9 - - true # HPM10 - - false # HPM11 - - false # HPM12 - - false # HPM13 - - false # HPM14 - - false # HPM15 - - false # HPM16 - - false # HPM17 - - false # HPM18 - - false # HPM19 - - false # HPM20 - - false # HPM21 - - false # HPM22 - - false # HPM23 - - false # HPM24 - - false # HPM25 - - false # HPM26 - - false # HPM27 - - false # HPM28 - - false # HPM29 - - false # HPM30 - - false # HPM31 - - # when true, writing an illegal value to a WLRL CSR field raises an Illegal Instruction exception - # when false, writing an illegal value to a WLRL CSR field is ignored - TRAP_ON_ILLEGAL_WLRL: true - TRAP_ON_UNIMPLEMENTED_INSTRUCTION: true - TRAP_ON_RESERVED_INSTRUCTION: true - TRAP_ON_UNIMPLEMENTED_CSR: true - - # Whether or not a real hardware `time` CSR exists. Implementations can either provide a real - # CSR or trap and emulate access at M-mode. - TIME_CSR_IMPLEMENTED: true - - # Whether or not the `misa` CSR returns zero or a non-zero value. - MISA_CSR_IMPLEMENTED: true - - # when true, *tval is written with the virtual PC of the EBREAK instruction (same information as *epc) - # when false, *tval is written with 0 on an EBREAK instruction - # - # regardless, *tval is always written with a virtual PC when an external breakpoint is generated - REPORT_VA_IN_MTVAL_ON_BREAKPOINT: true - - REPORT_VA_IN_MTVAL_ON_LOAD_MISALIGNED: true - REPORT_VA_IN_MTVAL_ON_STORE_AMO_MISALIGNED: true - REPORT_VA_IN_MTVAL_ON_INSTRUCTION_MISALIGNED: true - REPORT_VA_IN_MTVAL_ON_LOAD_ACCESS_FAULT: true - REPORT_VA_IN_MTVAL_ON_STORE_AMO_ACCESS_FAULT: true - REPORT_VA_IN_MTVAL_ON_INSTRUCTION_ACCESS_FAULT: true - REPORT_VA_IN_MTVAL_ON_LOAD_PAGE_FAULT: true - REPORT_VA_IN_MTVAL_ON_STORE_AMO_PAGE_FAULT: true - REPORT_VA_IN_MTVAL_ON_INSTRUCTION_PAGE_FAULT: true - REPORT_ENCODING_IN_MTVAL_ON_ILLEGAL_INSTRUCTION: true - # REPORT_CAUSE_IN_MTVAL_ON_SOFTWARE_CHECK: true - MTVAL_WIDTH: 64 # must check that this can hold any valid VA if any REPORT_VA* or Sdext, and, if REPORT_ENCODING*, at least [MXLEN, ILEN].min bits - - REPORT_VA_IN_STVAL_ON_BREAKPOINT: true - REPORT_VA_IN_STVAL_ON_LOAD_MISALIGNED: true - REPORT_VA_IN_STVAL_ON_STORE_AMO_MISALIGNED: true - REPORT_VA_IN_STVAL_ON_INSTRUCTION_MISALIGNED: true - REPORT_VA_IN_STVAL_ON_LOAD_ACCESS_FAULT: true - REPORT_VA_IN_STVAL_ON_STORE_AMO_ACCESS_FAULT: true - REPORT_VA_IN_STVAL_ON_INSTRUCTION_ACCESS_FAULT: true - REPORT_VA_IN_STVAL_ON_LOAD_PAGE_FAULT: true - REPORT_VA_IN_STVAL_ON_STORE_AMO_PAGE_FAULT: true - REPORT_VA_IN_STVAL_ON_INSTRUCTION_PAGE_FAULT: true - REPORT_ENCODING_IN_STVAL_ON_ILLEGAL_INSTRUCTION: true - # REPORT_CAUSE_IN_STVAL_ON_SOFTWARE_CHECK: true - STVAL_WIDTH: 64 # must check that this can hold any valid VA, and, if REPORT_ENCODING*, at least [SXLEN, ILEN].min bits - - REPORT_VA_IN_VSTVAL_ON_BREAKPOINT: true - REPORT_VA_IN_VSTVAL_ON_LOAD_MISALIGNED: true - REPORT_VA_IN_VSTVAL_ON_STORE_AMO_MISALIGNED: true - REPORT_VA_IN_VSTVAL_ON_INSTRUCTION_MISALIGNED: true - REPORT_VA_IN_VSTVAL_ON_LOAD_ACCESS_FAULT: true - REPORT_VA_IN_VSTVAL_ON_STORE_AMO_ACCESS_FAULT: true - REPORT_VA_IN_VSTVAL_ON_INSTRUCTION_ACCESS_FAULT: true - REPORT_VA_IN_VSTVAL_ON_LOAD_PAGE_FAULT: true - REPORT_VA_IN_VSTVAL_ON_STORE_AMO_PAGE_FAULT: true - REPORT_VA_IN_VSTVAL_ON_INSTRUCTION_PAGE_FAULT: true - REPORT_ENCODING_IN_VSTVAL_ON_ILLEGAL_INSTRUCTION: 
true - # REPORT_CAUSE_IN_VSTVAL_ON_SOFTWARE_CHECK: true - # VSTVAL_WIDTH not needed; "vstval is a WARL register that must be able to hold the same set of values that stval can hold" - - - # address of the unified discovery configuration data structure - # this address is reported in the mconfigptr CSR - CONFIG_PTR_ADDRESS: 0x1000 - - # number of implemented PMP entries. Can be any value between 0-64, inclusive. - # - # the number of implemented PMP registers must be 0, 16, or 64. - # - # Therefore, whether or not a pmpaddrN or pmpcfgN register exists depends on - # NUM_PMP_ENTRIES as follows: - # |=== - # | NUM_PMP_ENTRIES | pmpaddr<0-15> / pmpcfg<0-3> | pmpaddr<16-63> / pmpcfg<4-15> - # | 0 | N | N - # | 1-16 | Y | N - # | 17-64 | Y | Y - # |=== - # ** pmpcfgN for an odd N never exist when XLEN == 64 - # - # when NUM_PMP_ENTRIES is not exactly 0, 16, or 64, some extant pmp registers, - # and associated pmpNcfg, will be read-only zero (but will not cause an exception). - NUM_PMP_ENTRIES: 14 - - # log2 of the smallest supported PMP region - # generally, for systems with an MMU, should not be smaller than 12, - # as that would preclude caching PMP results in the TLB along with - # virtual memory translations - # - # Note that PMP_GRANULARITY is equal to G+2 (not G) as described in - # the privileged architecture - PMP_GRANULARITY: 12 - - # log2 of the smallest supported PMA region - # generally, for systems with an MMU, should not be smaller than 12, - # as that would preclude caching PMP results in the TLB along with - # virtual memory translations - PMA_GRANULARITY: 12 - - # number of bits in the physical address space - PHYS_ADDR_WIDTH: 56 - - # number of implemented ASID bits - # maximum value is 16 - ASID_WIDTH: 12 - - # when the A extensions is supported, indicates whether or not - # the extension can be disabled in the `misa.A` bit. - MUTABLE_MISA_A: false - - # when the B extensions is supported, indicates whether or not - # the extension can be disabled in the `misa.B` bit. - MUTABLE_MISA_B: false - - # when the C extensions is supported, indicates whether or not - # the extension can be disabled in the `misa.C` bit. - MUTABLE_MISA_C: false - - # when the D extensions is supported, indicates whether or not - # the extension can be disabled in the `misa.D` bit. - MUTABLE_MISA_D: false - - # when the F extensions is supported, indicates whether or not - # the extension can be disabled in the `misa.F` bit. - MUTABLE_MISA_F: false - - # when the H extensions is supported, indicates whether or not - # the extension can be disabled in the `misa.H` bit. - MUTABLE_MISA_H: false - - # when the M extensions is supported, indicates whether or not - # the extension can be disabled in the `misa.M` bit. - MUTABLE_MISA_M: false - - # when the S extensions is supported, indicates whether or not - # the extension can be disabled in the `misa.S` bit. - MUTABLE_MISA_S: false - - # when the U extensions is supported, indicates whether or not - # the extension can be disabled in the `misa.U` bit. - MUTABLE_MISA_U: false - - # when the V extensions is supported, indicates whether or not - # the extension can be disabled in the `misa.V` bit. - MUTABLE_MISA_V: false - - # size of a cache block, in bytes - CACHE_BLOCK_SIZE: 64 - - # number of supported virtualized guest interrupts - # corresponds to the `GEILEN` parameter in the RVI specs - NUM_EXTERNAL_GUEST_INTERRUPTS: 4 - - # Endianess of data in M-mode. 
Can be one of: - # - # * little: M-mode data is always little endian - # * big: M-mode data is always big endian - # * dynamic: M-mode data can be either little or big endian, depending on the RW CSR field mstatus.MBE - M_MODE_ENDIANESS: little - - # Endianess of data in M-mode. Can be one of: - # - # * little: S-mode data is always little endian - # * big: S-mode data is always big endian - # * dynamic: S-mode data can be either little or big endian, depending on the RW CSR field mstatus.SBE - S_MODE_ENDIANESS: little - - # Endianess of data in M-mode. Can be one of: - # - # * litte: U-mode data is always little endian - # * big: U-mode data is always big endian - # * dynamic: U-mode data can be either little or big endian, depending on the RW CSR field mstatus.UBE - U_MODE_ENDIANESS: little - - # Endianess of data in VU-mode. Can be one of: - # - # * little: VU-mode data is always little endian - # * big: VU-mode data is always big endian - # * dynamic: VU-mode data can be either little or big endian, depending on the RW CSR field vsstatus.UBE - VU_MODE_ENDIANESS: little - - # Endianess of data in VS-mode. Can be one of: - # - # * little: VS-mode data is always little endian - # * big: VS-mode data is always big endian - # * dynamic: VS-mode data can be either little or big endian, depending on the RW CSR field hstatus.VSBE - VS_MODE_ENDIANESS: little - - # XLENs supported in S-mode. Can be one of: - # - # * 32: SXLEN is always 32 - # * 64: SXLEN is always 64 - # * 3264: SXLEN can be changed (via mstatus.SXL) between 32 and 64 - SXLEN: 64 - - # XLENs supported in U-mode. Can be one of: - # - # * 32: SXLEN is always 32 - # * 64: SXLEN is always 64 - # * 3264: SXLEN can be changed (via mstatus.SXL) between 32 and 64 - UXLEN: 64 - - # XLENs supported in VS-mode. Can be one of: - # - # * 32: VSXLEN is always 32 - # * 64: VSXLEN is always 64 - # * 3264: VSXLEN can be changed (via hstatus.VSXL) between 32 and 64 - VSXLEN: 64 - - # XLENs supported in VS-mode. Can be one of: - # - # * 32: VSXLEN is always 32 - # * 64: VSXLEN is always 64 - # * 3264: VSXLEN can be changed (via hstatus.VSXL) between 32 and 64 - VUXLEN: 64 - - # Strategy used to handle reservation sets - # - # * "reserve naturally-aligned 64-byte region": Always reserve the 64-byte block containing the LR/SC address - # * "reserve naturally-aligned 128-byte region": Always reserve the 128-byte block containing the LR/SC address - # * "reserve exactly enough to cover the access": Always reserve exactly the LR/SC access, and no more - # * "custom": Custom behavior, leading to an 'unpredictable' call on any LR/SC - LRSC_RESERVATION_STRATEGY: reserve naturally-aligned 64-byte region - - # whether or not an SC will fail if its VA does not match the VA of the prior LR, - # even if the physical address of the SC and LR are the same - LRSC_FAIL_ON_VA_SYNONYM: false - - # what to do when an LR/SC address is misaligned: - # - # * 'always raise misaligned exception': self-explainitory - # * 'always raise access fault': self-explainitory - # * 'custom': Custom behavior; misaligned LR/SC may sometimes raise a misaligned exception and sometimes raise a access fault. 
Will lead to an 'unpredictable' call on any misaligned LR/SC access
- LRSC_MISALIGNED_BEHAVIOR: always raise misaligned exception
-
- # whether or not a Store Conditional fails if its physical address and size do not
- # exactly match the physical address and size of the last Load Reserved in program order
- # (independent of whether or not the SC is in the current reservation set)
- LRSC_FAIL_ON_NON_EXACT_LRSC: false
-
- # Whether writes from M-mode, U-mode, or S-mode to vsatp with an illegal mode setting are
- # ignored (as they are with satp), or if they are treated as WARL, leading to undpredictable
- # behavior.
- IGNORE_INVALID_VSATP_MODE_WRITES_WHEN_V_EQ_ZERO: true
-
- GSTAGE_MODE_BARE: true
- SV32_VSMODE_TRANSLATION: false
- SV39_VSMODE_TRANSLATION: true
- SV48_VSMODE_TRANSLATION: true
- SV57_VSMODE_TRANSLATION: true
- SV32X4_TRANSLATION: false
- SV39X4_TRANSLATION: true
- SV48X4_TRANSLATION: true
- SV57X4_TRANSLATION: false
- VMID_WIDTH: 8
- SV_MODE_BARE: true
- STVEC_MODE_DIRECT: true
- STVEC_MODE_VECTORED: true
- SATP_MODE_BARE: true
- REPORT_GPA_IN_TVAL_ON_LOAD_GUEST_PAGE_FAULT: true
- REPORT_GPA_IN_TVAL_ON_STORE_AMO_GUEST_PAGE_FAULT: true
- REPORT_GPA_IN_TVAL_ON_INSTRUCTION_GUEST_PAGE_FAULT: true
- REPORT_GPA_IN_TVAL_ON_INTERMEDIATE_GUEST_PAGE_FAULT: true
- TINST_VALUE_ON_FINAL_LOAD_GUEST_PAGE_FAULT: "always transformed standard instruction"
- TINST_VALUE_ON_FINAL_STORE_AMO_GUEST_PAGE_FAULT: "always transformed standard instruction"
- TINST_VALUE_ON_FINAL_INSTRUCTION_GUEST_PAGE_FAULT: "always zero"
- TINST_VALUE_ON_INSTRUCTION_ADDRESS_MISALIGNED: "always zero"
- TINST_VALUE_ON_BREAKPOINT: "always zero"
- TINST_VALUE_ON_VIRTUAL_INSTRUCTION: "always zero"
- TINST_VALUE_ON_LOAD_ADDRESS_MISALIGNED: "always zero"
- TINST_VALUE_ON_LOAD_ACCESS_FAULT: "always zero"
- TINST_VALUE_ON_STORE_AMO_ADDRESS_MISALIGNED: "always zero"
- TINST_VALUE_ON_STORE_AMO_ACCESS_FAULT: "always zero"
- TINST_VALUE_ON_UCALL: "always zero"
- TINST_VALUE_ON_SCALL: "always zero"
- TINST_VALUE_ON_MCALL: "always zero"
- TINST_VALUE_ON_VSCALL: "always zero"
- TINST_VALUE_ON_LOAD_PAGE_FAULT: "always zero"
- TINST_VALUE_ON_STORE_AMO_PAGE_FAULT: "always zero"
- MTVEC_MODES: [0, 1]
- MSTATUS_FS_LEGAL_VALUES: [0,1,2,3]
- MSTATUS_FS_WRITEABLE: true
- MSTATUS_TVM_IMPLEMENTED: true
- HW_MSTATUS_FS_DIRTY_UPDATE: precise
- MSTATUS_VS_WRITEABLE: true
- MSTATUS_VS_LEGAL_VALUES: [0,1,2,3]
- HW_MSTATUS_VS_DIRTY_UPDATE: precise
- FORCE_UPGRADE_CBO_INVAL_TO_FLUSH: true
diff --git a/cfgs/rv32/cfg.yaml b/cfgs/rv32/cfg.yaml
new file mode 100644
index 000000000..227256f17
--- /dev/null
+++ b/cfgs/rv32/cfg.yaml
@@ -0,0 +1,15 @@
+# yaml-language-server: $schema=../../schemas/config_schema.json
+---
+
+$schema: config_schema.json#
+kind: architecture configuration
+type: partially configured
+name: rv32
+description: A generic RV32 system; only MXLEN is known
+params:
+  XLEN: 32
+mandatory_extensions:
+- name: "I"
+  version: ">= 0"
+- name: "Sm"
+  version: ">= 0"
diff --git a/cfgs/rv64/cfg.yaml b/cfgs/rv64/cfg.yaml
new file mode 100644
index 000000000..294b7f8a2
--- /dev/null
+++ b/cfgs/rv64/cfg.yaml
@@ -0,0 +1,15 @@
+# yaml-language-server: $schema=../../schemas/config_schema.json
+---
+
+$schema: config_schema.json#
+kind: architecture configuration
+type: partially configured
+name: rv64
+description: A generic RV64 system; only MXLEN is known
+params:
+  XLEN: 64
+mandatory_extensions:
+- name: "I"
+  version: ">= 0"
+- name: "Sm"
+  version: ">= 0"
diff --git a/container.def b/container.def
index 534320d65..86d014abf 100644
---
a/container.def +++ b/container.def @@ -27,6 +27,8 @@ From: ubuntu:24.04 apt-get install -y --no-install-recommends ditaa + apt-get install -y --no-install-recommends libyaml-dev + # cleanup apt-get clean autoclean apt-get autoremove -y diff --git a/lib/arch_def.rb b/lib/arch_def.rb index 8bd00cf29..7263c925c 100644 --- a/lib/arch_def.rb +++ b/lib/arch_def.rb @@ -26,28 +26,25 @@ require "forwardable" require "ruby-prof" +require "tilt" + +require_relative "config" +require_relative "specification" -require_relative "validate" require_relative "idl" require_relative "idl/passes/find_return_values" require_relative "idl/passes/gen_adoc" require_relative "idl/passes/prune" -require_relative "idl/passes/reachable_functions" -require_relative "idl/passes/reachable_functions_unevaluated" require_relative "idl/passes/reachable_exceptions" -require_relative "arch_obj_models/manual" -require_relative "arch_obj_models/portfolio" -require_relative "arch_obj_models/profile" -require_relative "arch_obj_models/csr_field" -require_relative "arch_obj_models/csr" -require_relative "arch_obj_models/instruction" -require_relative "arch_obj_models/extension" -require_relative "arch_obj_models/certificate" +require_relative "idl/passes/reachable_functions" + require_relative "template_helpers" include TemplateHelpers -class ArchDef +class ArchDef < Specification + extend Forwardable + # @return [Idl::Compiler] The IDL compiler attr_reader :idl_compiler @@ -56,134 +53,32 @@ class ArchDef # @return [String] Name of this definition. Special names are: # * '_' - The generic architecture, with no configuration settings. - # * '_32' - A generic RV32 architecture, with only one parameter set (XLEN == 32) - # * '_64' - A generic RV64 architecture, with only one parameter set (XLEN == 64) + # * 'rv32' - A generic RV32 architecture, with only one parameter set (XLEN == 32) + # * 'rv64' - A generic RV64 architecture, with only one parameter set (XLEN == 64) attr_reader :name - # @return [Hash] A hash mapping parameter name to value for any parameter that has been configured with a value. May be empty. - attr_reader :param_values - - # @return [Integer] 32 or 64, the XLEN in M-mode - # @return [nil] if the XLEN in M-mode is not configured - attr_reader :mxlen - - # hash for Hash lookup - def hash = @name_sym.hash - - # @return [Idl::SymbolTable] Symbol table with global scope - # @return [nil] if the architecture is not configured (use symtab_32 or symtab_64) - def symtab - raise NotImplementedError, "Un-configured ArchDefs have no symbol table" if @symtab.nil? - - @symtab - end - - def fully_configured? = @arch_def["type"] == "fully configured" - def partially_configured? = @arch_def["type"] == "partially configured" - def unconfigured? = @arch_def["type"] == "unconfigured" - def configured? 
= @arch_def["type"] != "unconfigured" - def type = @arch_def["type"] - - # Initialize a new configured architecture definition - # - # @param config_name [#to_s] The name of a configuration, which must correspond - # to a folder under $root/cfgs - def initialize(config_name, arch_def_path, overlay_path: nil) - @name = config_name.to_s.freeze - @name_sym = @name.to_sym.freeze - - @idl_compiler = Idl::Compiler.new(self) - - validator = Validator.instance - begin - validator.validate_str(arch_def_path.read, type: :arch) - rescue Validator::SchemaValidationError => e - warn "While parsing unified architecture definition at #{arch_def_path}" - raise e - end - - @arch_def = YAML.load_file(arch_def_path, permitted_classes: [Date]).freeze - @param_values = (@arch_def.key?("params") ? @arch_def["params"] : {}).freeze - @mxlen = @arch_def.dig("params", "XLEN") # might be nil - raise "Must set XLEN for a configured arch def" if @mxlen.nil? && configured? - - @symtab = Idl::SymbolTable.new(self) - custom_globals_path = overlay_path.nil? ? Pathname.new("/does/not/exist") : overlay_path / "isa" / "globals.isa" - idl_path = File.exist?(custom_globals_path) ? custom_globals_path : $root / "arch" / "isa" / "globals.isa" - @global_ast = @idl_compiler.compile_file( - idl_path - ) - @global_ast.add_global_symbols(@symtab) - @symtab.deep_freeze - @global_ast.freeze_tree(@symtab) - @mxlen.freeze - end - - # type check all IDL, including globals, instruction ops, and CSR functions - # - # @param show_progress [Boolean] whether to show progress bars - # @param io [IO] where to write progress bars - # @return [void] - def type_check(show_progress: true, io: $stdout) - io.puts "Type checking IDL code for #{name}..." - progressbar = - if show_progress - ProgressBar.create(title: "Instructions", total: instructions.size) - end + # def_delegator :@specification, :ref - instructions.each do |inst| - progressbar.increment if show_progress - if @mxlen == 32 - inst.type_checked_operation_ast(@idl_compiler, @symtab, 32) if inst.rv32? - elsif @mxlen == 64 - inst.type_checked_operation_ast(@idl_compiler, @symtab, 64) if inst.rv64? - inst.type_checked_operation_ast(@idl_compiler, @symtab, 32) if possible_xlens.include?(32) && inst.rv32? - end - end + # Specification::OBJS.each do |obj_info| + # def_delegator :@specification, obj_info[:fn_name].to_sym + # def_delegator :@specification, "#{obj_info[:fn_name]}_hash".to_sym - progressbar = - if show_progress - ProgressBar.create(title: "CSRs", total: csrs.size) - end + # fn_name = ActiveSupport::Inflector.pluralize(obj_info[:fn_name]).to_sym + # define_method(fn_name) do + # obj = @obj_cache[fn_name] + # return obj unless obj.nil? - csrs.each do |csr| - progressbar.increment if show_progress - if csr.has_custom_sw_read? - if (possible_xlens.include?(32) && csr.defined_in_base32?) || (possible_xlens.include?(64) && csr.defined_in_base64?) - csr.type_checked_sw_read_ast(@symtab) - end - end - csr.fields.each do |field| - unless field.type_ast(@symtab).nil? - if ((possible_xlens.include?(32) && csr.defined_in_base32? && field.defined_in_base32?) || - (possible_xlens.include?(64) && csr.defined_in_base64? && field.defined_in_base64?)) - field.type_checked_type_ast(@symtab) - end - end - unless field.reset_value_ast(@symtab).nil? - if ((possible_xlens.include?(32) && csr.defined_in_base32? && field.defined_in_base32?) || - (possible_xlens.include?(64) && csr.defined_in_base64? && field.defined_in_base64?)) - field.type_checked_reset_value_ast(@symtab) if csr.defined_in_base32? 
&& field.defined_in_base32? - end - end - unless field.sw_write_ast(@symtab).nil? - field.type_checked_sw_write_ast(@symtab, 32) if possible_xlens.include?(32) && csr.defined_in_base32? && field.defined_in_base32? - field.type_checked_sw_write_ast(@symtab, 64) if possible_xlens.include?(64) && csr.defined_in_base64? && field.defined_in_base64? - end - end - end + # puts "populating #{fn_name}" - progressbar = - if show_progress - ProgressBar.create(title: "Functions", total: functions.size) - end - functions.each do |func| - progressbar.increment if show_progress - func.type_check(@symtab) - end + # @obj_cache[fn_name] = + # @specification.send(fn_name).map { |spec_obj| spec_obj.clone(arch_def: self) } + # end + # end - puts "done" if show_progress - end + def_delegators \ + :@config, \ + :fully_configured?, :partially_configured?, :unconfigured?, :configured?, \ + :mxlen, :param_values # Returns whether or not it may be possible to switch XLEN given this definition. # @@ -223,15 +118,15 @@ def multi_xlen_in_mode?(mode) return true if unconfigured? if fully_configured? - ext?(:S) && (@param_values["SXLEN"] == 3264) + ext?(:S) && (param_values["SXLEN"] == 3264) elsif partially_configured? return false if prohibited_ext?(:S) return true unless ext?(:S) # if S is not known to be implemented, we can't say anything about it - return true unless @param_values.key?("SXLEN") + return true unless param_values.key?("SXLEN") - @param_values["SXLEN"] == 3264 + param_values["SXLEN"] == 3264 else raise "Unexpected configuration state" end @@ -241,13 +136,13 @@ def multi_xlen_in_mode?(mode) return true if unconfigured? if fully_configured? - ext?(:U) && (@param_values["UXLEN"] == 3264) + ext?(:U) && (param_values["UXLEN"] == 3264) elsif partially_configured? return true unless ext?(:U) # if U is not known to be implemented, we can't say anything about it - return true unless @param_values.key?("UXLEN") + return true unless param_values.key?("UXLEN") - @param_values["UXLEN"] == 3264 + param_values["UXLEN"] == 3264 else raise "Unexpected configuration state" end @@ -257,13 +152,13 @@ def multi_xlen_in_mode?(mode) return true if unconfigured? if fully_configured? - ext?(:H) && (@param_values["VSXLEN"] == 3264) + ext?(:H) && (param_values["VSXLEN"] == 3264) elsif partially_configured? return true unless ext?(:H) # if H is not known to be implemented, we can't say anything about it - return true unless @param_values.key?("VSXLEN") + return true unless param_values.key?("VSXLEN") - @param_values["VSXLEN"] == 3264 + param_values["VSXLEN"] == 3264 else raise "Unexpected configuration state" end @@ -273,13 +168,13 @@ def multi_xlen_in_mode?(mode) return true if unconfigured? if fully_configured? - ext?(:H) && (@param_values["VUXLEN"] == 3264) + ext?(:H) && (param_values["VUXLEN"] == 3264) elsif partially_configured? return true unless ext?(:H) # if H is not known to be implemented, we can't say anything about it - return true unless @param_values.key?("VUXLEN") + return true unless param_values.key?("VUXLEN") - @param_values["VUXLEN"] == 3264 + param_values["VUXLEN"] == 3264 else raise "Unexpected configuration state" end @@ -291,21 +186,148 @@ def multi_xlen_in_mode?(mode) # @return [Array] List of possible XLENs in any mode for this config def possible_xlens = multi_xlen? ? 
[32, 64] : [mxlen] + # hash for Hash lookup + def hash = @name_sym.hash + + # @return [Idl::SymbolTable] Symbol table with global scope + # @return [nil] if the architecture is not configured (use symtab_32 or symtab_64) + def symtab + raise NotImplementedError, "Un-configured ArchDefs have no symbol table" if @symtab.nil? + + @symtab + end + + def config_type = @config.type + + # Initialize a new configured architecture definition + # + # @param config_name [#to_s] The name of a configuration, which must correspond + # to a folder name under cfg_path + def initialize(config_name, arch_path, overlay_path: nil, cfg_path: "#{$root}/cfgs") + super(arch_path) + + @name = config_name.to_s.freeze + @name_sym = @name.to_sym.freeze + + @obj_cache = {} + + @config = Config.create("#{cfg_path}/#{config_name}/cfg.yaml") + @mxlen = @config.mxlen + @mxlen.freeze + + @idl_compiler = Idl::Compiler.new + + @symtab = Idl::SymbolTable.new(self) + custom_globals_path = overlay_path.nil? ? Pathname.new("/does/not/exist") : overlay_path / "isa" / "globals.isa" + idl_path = File.exist?(custom_globals_path) ? custom_globals_path : $root / "arch" / "isa" / "globals.isa" + @global_ast = @idl_compiler.compile_file( + idl_path + ) + @global_ast.add_global_symbols(@symtab) + @symtab.deep_freeze + @global_ast.freeze_tree(@symtab) + end + + # type check all IDL, including globals, instruction ops, and CSR functions + # + # @param config [Config] Configuration + # @param show_progress [Boolean] whether to show progress bars + # @param io [IO] where to write progress bars + # @return [void] + def type_check(show_progress: true, io: $stdout) + io.puts "Type checking IDL code for #{@config.name}..." + progressbar = + if show_progress + ProgressBar.create(title: "Instructions", total: instructions.size) + end + + instructions.each do |inst| + progressbar.increment if show_progress + if @mxlen == 32 + inst.type_checked_operation_ast(@idl_compiler, @symtab, 32) if inst.rv32? + elsif @mxlen == 64 + inst.type_checked_operation_ast(@idl_compiler, @symtab, 64) if inst.rv64? + inst.type_checked_operation_ast(@idl_compiler, @symtab, 32) if @config.possible_xlens.include?(32) && inst.rv32? + end + end + + progressbar = + if show_progress + ProgressBar.create(title: "CSRs", total: csrs.size) + end + + csrs.each do |csr| + progressbar.increment if show_progress + if csr.has_custom_sw_read? + if (@config.possible_xlens.include?(32) && csr.defined_in_base32?) || (@config.possible_xlens.include?(64) && csr.defined_in_base64?) + csr.type_checked_sw_read_ast(@symtab) + end + end + csr.fields.each do |field| + unless field.type_ast(@symtab).nil? + if ((@config.possible_xlens.include?(32) && csr.defined_in_base32? && field.defined_in_base32?) || + (@config.possible_xlens.include?(64) && csr.defined_in_base64? && field.defined_in_base64?)) + field.type_checked_type_ast(@symtab) + end + end + unless field.reset_value_ast(@symtab).nil? + if ((@config.possible_xlens.include?(32) && csr.defined_in_base32? && field.defined_in_base32?) || + (@config.possible_xlens.include?(64) && csr.defined_in_base64? && field.defined_in_base64?)) + field.type_checked_reset_value_ast(@symtab) if csr.defined_in_base32? && field.defined_in_base32? + end + end + unless field.sw_write_ast(@symtab).nil? + field.type_checked_sw_write_ast(@symtab, 32) if @config.possible_xlens.include?(32) && csr.defined_in_base32? && field.defined_in_base32? + field.type_checked_sw_write_ast(@symtab, 64) if @config.possible_xlens.include?(64) && csr.defined_in_base64? 
&& field.defined_in_base64? + end + end + end + + progressbar = + if show_progress + ProgressBar.create(title: "Functions", total: functions.size) + end + functions.each do |func| + progressbar.increment if show_progress + func.type_check(@symtab) + end + + puts "done" if show_progress + end + # @return [Array] List of all available parameters with known values for the config def params_with_value return @params_with_value unless @params_with_value.nil? @params_with_value = [] - extensions.each do |ext_version| - ext = extension(ext_version.name) - ext.params.each do |ext_param| - if param_values.key?(ext_param.name) + return @params_with_value if @config.unconfigured? + + if @config.fully_configured? + transitive_implemented_extensions.each do |ext_version| + ext = extension(ext_version.name) + ext.params.each do |ext_param| + next unless @config.param_values.key?(ext_param.name) + @params_with_value << ExtensionParameterWithValue.new( ext_param, - param_values[ext_param.name] + @config.param_values[ext_param.name] ) end end + elsif @config.partially_configured? + mandatory_extensions.each do |ext_requirement| + ext = extension(ext_requirement.name) + ext.params.each do |ext_param| + next unless @config.param_values.key?(ext_param.name) + + @params_with_value << ExtensionParameterWithValue.new( + ext_param, + @config.param_values[ext_param.name] + ) + end + end + else + raise "ERROR: unexpected config type" end @params_with_value end @@ -315,12 +337,11 @@ def params_without_value return @params_without_value unless @params_without_value.nil? @params_without_value = [] - extensions.each do |ext_version| - ext = extension(ext_version.name) + extensions.each do |ext| ext.params.each do |ext_param| - unless param_values.key?(ext_param.name) - @params_without_value << ext_param - end + next if @config.param_values.key?(ext_param.name) + + @params_without_value << ext_param end end @params_without_value @@ -330,85 +351,104 @@ def params_without_value # @return [String] A string representation of the object. def inspect = "ArchDef##{name}" - # @return [Array] List of all extensions, even those that are't implemented - def extensions - return @extensions unless @extensions.nil? - - @extensions = [] - @arch_def["extensions"].each do |ext_data| - @extensions << Extension.new(ext_data, self) - end - @extensions - end - - # may be overridden by subclass - # @return [Array] List of all extensions known to be implemented in this architecture - def implemented_extensions - raise "implemented_extensions is only valid for a fully configured defintion" unless fully_configured? + # @return [Array] List of all extensions known to be implemented in this config, including transitive implications + def transitive_implemented_extensions + return @transitive_implemented_extensions unless @transitive_implemented_extensions.nil? - return @implemented_extensions unless @implemented_extensions.nil? + raise "implemented_extensions is only valid for a fully configured defintion" unless @config.fully_configured? - @implemented_extensions = [] - if @arch_def.key?("implemented_extensions") - @arch_def["implemented_extensions"].each do |e| - @implemented_extensions << ExtensionVersion.new(e["name"], e["version"], self) - end + list = @config.implemented_extensions(self) + list.each do |e| + implications = e.transitive_implications + list.concat(implications) unless implications.empty? 
end
-    @implemented_extensions
+    @transitive_implemented_extensions = list.uniq.sort
   end

-  # @return [Array] List of extensions that are explicitly required by an arch def
-  def mandatory_extensions
-    raise "mandatory_extensions is only valid for a partially configured defintion" unless partially_configured?
-
-    return @mandatory_extensions unless @mandatory_extensions.nil?
-
-    @mandatory_extensions = []
-    if @arch_def.key?("mandatory_extensions")
-      @arch_def["mandatory_extensions"].each do |e|
-        @mandatory_extensions << ExtensionRequirement.new(e["name"], e["version"], presence: "mandatory")
-      end
-    end
-    @mandatory_extensions
-  end
+  # @return [Array] List of all mandatory extension requirements
+  def mandatory_extensions = @config.mandatory_extensions(self)

-  # @return [Array] List of extensions that are explicitly prohibited by an arch def
+  # @return [Array] List of all extensions that are prohibited.
+  #                 This includes extensions explicitly prohibited by the config file
+  #                 and extensions that conflict with a mandatory extension.
   def prohibited_extensions
     return @prohibited_extensions unless @prohibited_extensions.nil?

-    @prohibited_extensions = []
-    if @arch_def.key?("prohibited_extensions")
-      @arch_def["prohibited_extensions"].each do |e|
-        if e.is_a?(String)
-          @prohibited_extensions << ExtensionRequirement.new(e, nil)
+    if @config.partially_configured?
+      @prohibited_extensions = @config.prohibited_extensions(self)
+
+      # now add any extensions that are prohibited by a mandatory extension
+      mandatory_extensions.each do |ext_req|
+        ext_req.extension.conflicts.each do |conflict|
+          if @prohibited_extensions.none? { |prohibited_ext| prohibited_ext.name == conflict.name }
+            @prohibited_extensions << conflict
+          else
+            # pick whichever requirement is more expansive
+            p = @prohibited_extensions.find { |prohibited_ext| prohibited_ext.name == conflict.name }
+            if p.version_requirement.subsumes?(conflict.version_requirement)
+              @prohibited_extensions.delete(p)
+              @prohibited_extensions << conflict
+            end
+          end
+        end
+      end
+
+      @prohibited_extensions
+    elsif @config.fully_configured?
+      prohibited_ext_versions = []
+      extensions.each do |ext|
+        ext.versions.each do |ext_ver|
+          prohibited_ext_versions << ext_ver unless transitive_implemented_extensions.include?(ext_ver)
+        end
+      end
+      @prohibited_extensions = []
+      prohibited_ext_versions.group_by(&:name).each_value do |ext_ver_list|
+        if ext_ver_list.sort == ext_ver_list[0].ext.versions.sort
+          # excludes every version
+          @prohibited_extensions <<
+            ExtensionRequirement.new(
+              ext_ver_list[0].ext.name, ">= #{ext_ver_list.min.version_spec.canonical}",
+              presence: "prohibited", arch_def: self
+            )
+        elsif ext_ver_list.size == (ext_ver_list[0].ext.versions.size - 1)
+          # excludes all but one version
+          allowed_version_list = (ext_ver_list[0].ext.versions - ext_ver_list)
+          raise "Expected only a single element" unless allowed_version_list.size == 1
+
+          allowed_version = allowed_version_list[0]
+          @prohibited_extensions <<
+            ExtensionRequirement.new(
+              ext_ver_list[0].ext.name, "!= #{allowed_version.version_spec.canonical}",
+              presence: "prohibited", arch_def: self
+            )
        else
-          @prohibited_extensions << ExtensionRequirement.new(e["name"], e["requirements"], presence: "prohibited")
+        else
+          # need to group
+          raise "TODO"
        end
      end
+    else
+      @prohibited_extensions = []
    end
    @prohibited_extensions
  end

-  def prohibited_ext?(ext_name)
-    prohibited_extensions.any?
{ |ext_req| ext_req.name == ext_name.to_s } - end - - # @return [Hash] Hash of all extensions, even those that aren't implemented, indexed by extension name - def extension_hash - return @extension_hash unless @extension_hash.nil? - - @extension_hash = {} - extensions.each do |ext| - @extension_hash[ext.name] = ext + # @overload prohibited_ext?(ext) + # Returns true if the ExtensionVersion +ext+ is prohibited + # @param ext [ExtensionVersion] An extension version + # @return [Boolean] + # + # @overload prohibited_ext?(ext) + # Returns true if any version of the extension named +ext+ is prohibited + # @param ext [String] An extension name + # @return [Boolean] + def prohibited_ext?(ext) + if ext.is_a?(ExtensionVersion) + prohibited_extensions.any? { |ext_req| ext_req.satisfied_by?(ext) } + elsif ext.is_a?(String) || ext.is_a?(Symbol) + prohibited_extensions.any? { |ext_req| ext_req.name == ext.to_s } + else + raise ArgumentError, "Argument to prohibited_ext? should be an ExtensionVersion or a String" end - @extension_hash - end - - # @param name [#to_s] Extension name - # @return [Extension] Extension named `name` - # @return [nil] if no extension `name` exists - def extension(name) - extension_hash[name.to_s] end # @overload ext?(ext_name) @@ -416,8 +456,7 @@ def extension(name) # @return [Boolean] True if the extension `name` is implemented # @overload ext?(ext_name, ext_version_requirements) # @param ext_name [#to_s] Extension name (case sensitive) - # @param ext_version_requirements [Number,String,Array] Extension version requirements, taking the same inputs as Gem::Requirement - # @see https://docs.ruby-lang.org/en/3.0/Gem/Requirement.html#method-c-new Gem::Requirement#new + # @param ext_version_requirements [Number,String,Array] Extension version requirements # @return [Boolean] True if the extension `name` meeting `ext_version_requirements` is implemented # @example Checking extension presence with a version requirement # arch_def.ext?(:S, ">= 1.12") @@ -431,23 +470,23 @@ def ext?(ext_name, *ext_version_requirements) return cached_result unless cached_result.nil? result = - if fully_configured? - implemented_extensions.any? do |e| + if @config.fully_configured? + transitive_implemented_extensions.any? do |e| if ext_version_requirements.empty? e.name == ext_name.to_s else - requirement = Gem::Requirement.new(ext_version_requirements) - (e.name == ext_name.to_s) && requirement.satisfied_by?(e.version) + requirement = ExtensionRequirement.new(ext_name, *ext_version_requirements, arch_def: self) + requirement.satisfied_by?(e) end end - elsif partially_configured? + elsif @config.partially_configured? mandatory_extensions.any? do |e| if ext_version_requirements.empty? e.name == ext_name.to_s else - requirement = Gem::Requirement.new(ext_version_requirements) + requirement = ExtensionRequirement.new(ext_name, *ext_version_requirements, arch_def: self) e.satisfying_versions.all? 
do |ext_ver| - (e.name == ext_name.to_s) && requirement.satisfied_by?(exrt_ver.version) + requirement.satisfied_by?(ext_ver) end end end @@ -459,311 +498,6 @@ def ext?(ext_name, *ext_version_requirements) @ext_cache[[ext_name, ext_version_requirements]] = result end - # @return [Array] Array of all extensions that are prohibited because they are excluded by an implemented extension - def conflicting_extensions - extensions.map(&:conflicts).flatten - end - - # @return [Boolean] whether or not ext_name is prohibited because it is excluded by an implemented extension - def conflicting_ext?(ext_name) - prohibited_extensions.include? { |ext_req| ext_req.name == ext_name } - end - - # @return [Array] Alphabetical list of all parameters defined in the architecture - def params - return @params unless @params.nil? - - @params = extensions.map(&:params).flatten.uniq(&:name).sort_by!(&:name) - end - - # @return [Hash] Hash of all extension parameters defined in the architecture - def params_hash - return @params_hash unless @params_hash.nil? - - @params_hash = {} - params.each do |param| - @params_hash[param.name] = param - end - @param_hash - end - - # @return [ExtensionParameter] Parameter named +name+ - # @return [nil] if there is no parameter named +name+ - def param(name) - params_hash[name] - end - - # @return [Array] List of all CSRs defined by RISC-V, whether or not they are implemented - def csrs - return @csrs unless @csrs.nil? - - @csrs = @arch_def["csrs"].map do |csr_data| - Csr.new(csr_data) - end - end - - # @return [Array] List of all known CSRs, even those not implemented by - # this config - def all_known_csr_names - @arch_def["csrs"].map { |csr| csr[0] } - end - - # @return [Hash] All csrs, even unimplemented ones, indexed by CSR name - def csr_hash - return @csr_hash unless @csr_hash.nil? - - @csr_hash = {} - csrs.each do |csr| - @csr_hash[csr.name] = csr - end - @csr_hash - end - - # @param csr_name [#to_s] CSR name - # @return [Csr,nil] a specific csr, or nil if it doesn't exist - def csr(csr_name) - csr_hash[csr_name] - end - - # @return [Array] List of all instructions, whether or not they are implemented - def instructions - return @instructions unless @instructions.nil? - - @instructions = @arch_def["instructions"].map do |inst_data| - Instruction.new(inst_data, self) - end - - @instructions - end - - # @return [Hash] All instructions, indexed by name - def instruction_hash - return @instruction_hash unless @instruction_hash.nil? - - @instruction_hash = {} - instructions.each do |inst| - @instruction_hash[inst.name] = inst - end - @instruction_hash - end - - # @param inst_name [#to_s] Instruction name - # @return [Instruction,nil] An instruction named 'inst_name', or nil if it doesn't exist - def inst(inst_name) - instruction_hash[inst_name.to_s] - end - alias instruction inst - - # @return [Array] List of all functions defined by the architecture - def functions - return @functions unless @functions.nil? - - @functions = @global_ast.functions - end - - # @return [Hash] Function hash of name => FunctionBodyAst - def function_hash - return @function_hash unless @function_hash.nil? 
- - @function_hash = {} - functions.each do |func| - @function_hash[func.name] = func - end - - @function_hash - end - - # @param name [String] A function name - # @return [Idl::FunctionBodyAst] A function named +name+ - # @return [nil] if no function named +name+ is found - def function(name) - function_hash[name] - end - - # @return [Array] List of all manuals defined by the architecture - def manuals - return @manuals unless @manuals.nil? - - @manuals = [] - @arch_def["manuals"].each_value do |manual_data| - @manuals << Manual.new(manual_data, self) - end - @manuals - end - - # @return [Hash] All manuals, indexed by name - def manuals_hash - return @manuals_hash unless @manuals_hash.nil? - - @manuals_hash = {} - manuals.each do |manual| - @manuals_hash[manual.name] = manual - end - @manuals_hash - end - - # @return [Manual,nil] A manual named +name+, or nil if it doesn't exist - def manual(name) = manuals_hash[name] - - # @return [Array] All known profile classes (e.g. RVA) - def profile_classes - return @profile_classes unless @profile_classes.nil? - - @profile_classes = [] - @arch_def["profile_classes"].each_value do |pc_data| - @profile_classes << ProfileClass.new(pc_data, self) - end - @profile_classes - end - - # @return [Hash] Profile classes, indexed by profile class name - def profile_classes_hash - return @profile_classes_hash unless @profile_classes_hash.nil? - - @profile_classes_hash = {} - profile_classes.each do |pc| - @profile_classes_hash[pc.name] = pc - end - @profile_classes_hash - end - - # @return [ProfileClass] The profile class named +name+ - # @return [nil] if the profile class does not exist - def profile_class(profile_class_name) = profile_classes_hash[profile_class_name] - - # @return [ProfileRelease] List of all profile releases (e.g. RVA20, RVA22) for all profile classes. - def profile_releases - return @profile_releases unless @profile_releases.nil? - - @profile_releases = [] - @arch_def["profile_releases"].each_value do |pr_data| - raise ArgumentError, "Expecting pr_data to be a hash" unless pr_data.is_a?(Hash) - - profile_release = ProfileRelease.new(pr_data, self) - raise ArgumentError, "ProfileRelease constructor returned nil" if profile_release.nil? - - @profile_releases << profile_release - end - @profile_releases - end - - # @return [Hash], indexed by profile release name - def profile_releases_hash - return @profile_releases_hash unless @profile_releases_hash.nil? - - @profile_releases_hash = {} - profile_releases.each do |profile_release| - @profile_releases_hash[profile_release.name] = profile_release - end - @profile_releases_hash - end - - # @return [ProfileRelease] The profile release named +profile_release_name+ - # @return [nil] if the profile release does not exist - def profile_release(profile_release_name) = profile_releases_hash[profile_release_name] - - # @return [Profile] List of all defined profiles in all releases in all classes - def profiles - return @profiles unless @profiles.nil? - - @profiles = [] - @arch_def["profile_releases"].each_value do |pr_data| - raise ArgumentError, "Expecting pr_data to be a hash" unless pr_data.is_a?(Hash) - - pr_data["profiles"].each do |profile_name, profile_data| - profile_data["name"] = profile_name - profile = Profile.new(profile_data, self) - raise ArgumentError, "Profile constructor returned nil" if profile.nil? - - @profiles << profile - end - end - @profiles - end - - # @return [Hash] Profiles, indexed by profile name - def profiles_hash - return @profiles_hash unless @profiles_hash.nil? 
- - @profiles_hash = {} - profiles.each do |profile| - @profiles_hash[profile.name] = profile - end - @profiles_hash - end - - # @return [Profile] The profile named +name+ - # @return [nil] if the profile does not exist - def profile(name) = profiles_hash[name] - - def cert_classes - return @cert_classes unless @cert_classes.nil? - - @cert_classes = [] - @arch_def["certificate_classes"].each do |cc_data| - @cert_classes << CertClass.new(cc_data, self) - end - @cert_classes - end - - def cert_classes_hash - return @cert_classes_hash unless @cert_classes_hash.nil? - - @cert_classes_hash = {} - cert_classes.each do |cc| - @cert_classes_hash[cc.name] = cc - end - @cert_classes_hash - end - - # @return [CertClass] The certificate class named +name+ - # @return [nil] if the certificate class does not exist - def cert_class(name) = cert_classes_hash[name] - - # @return [CertModel] List of all defined certificate models across all certificate classes - def cert_models - return @cert_models unless @cert_models.nil? - - @cert_models = [] - @arch_def["certificate_models"].each do |cm_data| - @cert_models << CertModel.new(cm_data, self) - end - @cert_models - end - - def cert_models_hash - return @cert_models_hash unless @cert_models_hash.nil? - - @cert_models_hash = {} - cert_models.each do |cert_model| - @cert_models_hash[cert_model.name] = cert_model - end - @cert_models_hash - end - - # @return [CertModel] The CertModel named +name+ - # @return [nil] if the CertModel does not exist - def cert_model(name) = cert_models_hash[name] - - # @return [Array] All exception codes defined by RISC-V - def exception_codes - return @exception_codes unless @exception_codes.nil? - - @exception_codes = - extensions.reduce([]) do |list, ext_version| - ecodes = extension(ext_version.name)["exception_codes"] - next list if ecodes.nil? - - ecodes.each do |ecode| - # double check that all the codes are unique - raise "Duplicate exception code" if list.any? { |e| e.num == ecode["num"] || e.name == ecode["name"] || e.var == ecode["var"] } - - list << ExceptionCode.new(ecode["name"], ecode["var"], ecode["num"], self) - end - list - end - end - # @return [Array] All exception codes known to be implemented def implemented_exception_codes return @implemented_exception_codes unless @implemented_exception_codes.nil? @@ -787,27 +521,6 @@ def implemented_exception_codes end end - # @return [Array] All interrupt codes defined by extensions - def interrupt_codes - return @interrupt_codes unless @interrupt_codes.nil? - - @interupt_codes = - extensions.reduce([]) do |list, ext_version| - icodes = extension(ext_version.name)["interrupt_codes"] - next list if icodes.nil? - - icodes.each do |icode| - # double check that all the codes are unique - if list.any? { |i| i.num == icode["num"] || i.name == icode["name"] || i.var == icode["var"] } - raise "Duplicate interrupt code" - end - - list << InterruptCode.new(icode["name"], icode["var"], icode["num"], self) - end - list - end - end - # @return [Array] All interrupt codes known to be implemented def implemented_interrupt_codes return @implemented_interrupt_codes unless @implemented_interrupt_codes.nil? 
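For orientation, here is a minimal usage sketch (not part of the patch). With `ArchDef` now inheriting from `Specification`, the per-object lookups deleted above are assumed to resolve through the base class, while the extension predicates reworked earlier stay on `ArchDef`. The config name, paths, and object names ("C", "mstatus", "addi", "Zfinx") are illustrative assumptions, not values taken from the diff.

```ruby
# Sketch only, assuming it is run from the repo root with the new cfgs/ layout.
require "pathname"
require_relative "lib/arch_def"

arch_path = Pathname.new("arch")  # hypothetical path to the architecture database
arch_def  = ArchDef.new("rv64", arch_path, cfg_path: "cfgs")

arch_def.extension("C")        # extension lookup, assumed to come from Specification
arch_def.csr("mstatus")        # CSR lookup
arch_def.instruction("addi")   # instruction lookup (find_replace_links now calls instruction(name))

arch_def.ext?(:S, ">= 1.12")          # version checks now build an ExtensionRequirement
arch_def.prohibited_ext?("Zfinx")     # by extension name (String or Symbol)...
# ...or by ExtensionVersion, per the new prohibited_ext? overloads:
# arch_def.prohibited_ext?(ExtensionVersion.new("Zfinx", "1.0", arch_def))
```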
@@ -831,106 +544,25 @@ def implemented_interrupt_codes end end - # @return [Hash] The raw architecture defintion data structure - def data - @arch_def - end - - # given a `$ref` target, return the Ruby object - # - # @params uri [String] JSON Reference pointer - # @return [Object] The pointed-to object - def ref(uri) - raise ArgumentError, "JSON Reference must contain one '#'" unless uri.count("#") == 1 - - file_path, obj_path = uri.split("#") - obj = - case file_path - when /^certificate_class.*/ - cert_class_name = File.basename(file_path, ".yaml") - cert_class(cert_class_name) - when /^certificate_model.*/ - cert_mode_name = File.basename(file_path, ".yaml") - cert_model(cert_model_name) - when /^csr.*/ - csr_name = File.basename(file_path, ".yaml") - csr(csr_name) - when /^ext.*/ - ext_name = File.basename(file_path, ".yaml") - extension(ext_name) - when /^inst.*/ - inst_name = File.basename(file_path, ".yaml") - instruction(inst_name) - when /^manual.*/ - manual_name = File.basename(file_path, ".yaml") - manual(manual_name) - when /^profile_class.*/ - profile_class_name = File.basename(file_path, ".yaml") - profile_class(profile_class_name) - when /^profile_release.*/ - profile_release_name = File.basename(file_path, ".yaml") - profile_release(profile_release_name) - else - raise "Unhandled ref object: #{file_path}" - end - - if obj_path.nil? - obj - else - parts = obj_path.split("/") - parts.each do |part| - raise "Error in $ref. There is no method '#{part}' for a #{obj.class.name}" unless obj.respond_to?(part.to_sym) + # @return [Array] List of all functions defined by the architecture + def functions + return @functions unless @functions.nil? - obj = obj.send(part) - end - obj - end + @functions = @global_ast.functions end # @return [Array] List of all implemented CSRs - def implemented_csrs - return @implemented_csrs unless @implemented_csrs.nil? - - @implemented_csrs = - if @arch_def.key?("implemented_csrs") - csrs.select { |c| @arch_def["implemented_csrs"].include?(c.name) } - else - [] - end - end - - # @return [Hash] Implemented csrs, indexed by CSR name - def implemented_csr_hash - return @implemented_csr_hash unless @implemented_csr_hash.nil? - - @implemented_csr_hash = {} - implemented_csrs.each do |csr| - @implemented_csr_hash[csr.name] = csr - end - @implemented_csr_hash - end - - # @param csr_name [#to_s] CSR name - # @return [Csr,nil] a specific csr, or nil if it doesn't exist or isn't implemented - def implemented_csr(csr_name) - implemented_csr_hash[csr_name] + def transitive_implemented_csrs + @transitive_implemented_csrs ||= + transitive_implemented_extensions.map(&:implemented_csrs).flatten.uniq.sort end # @return [Array] List of all implemented instructions - def implemented_instructions - return @implemented_instructions unless @implemented_instructions.nil? - - @implemented_instructions = - if @arch_def.key?("implemented_instructions") - @arch_def["implemented_instructions"].map do |inst_name| - instruction_hash[inst_name] - end - else - [] - end + def transitive_implemented_instructions + @transitive_implemented_instructions ||= + transitive_implemented_extensions.map(&:implemented_instructions).flatten.uniq.sort end - # @return [Array] List of all reachable IDL functions for the config def implemented_functions return @implemented_functions unless @implemented_functions.nil? 
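A hedged sketch of how downstream generator code might consume the new `transitive_*` accessors added in this hunk; the `"my_soc"` config name is hypothetical and `arch_path` is the same assumed path as in the previous sketch.

```ruby
# Sketch only: iterate the extension closure of a fully configured design.
arch_def = ArchDef.new("my_soc", arch_path, cfg_path: "cfgs")

arch_def.transitive_implemented_extensions.each do |ext_ver|
  puts ext_ver.to_s   # ExtensionVersion#to_s => "<name>@<canonical version>"
end

# CSRs and instructions are now derived from the extension closure rather than
# read from implemented_csrs/implemented_instructions lists in the config file.
csr_names  = arch_def.transitive_implemented_csrs.map(&:name)
inst_names = arch_def.transitive_implemented_instructions.map(&:name)
```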
@@ -939,7 +571,7 @@ def implemented_functions puts " Finding all reachable functions from instruction operations" - implemented_instructions.each do |inst| + transitive_implemented_instructions.each do |inst| @implemented_functions << if inst.base.nil? if multi_xlen? @@ -958,7 +590,7 @@ def implemented_functions puts " Finding all reachable functions from CSR operations" - implemented_csrs.each do |csr| + transitive_implemented_csrs.each do |csr| csr_funcs = csr.reachable_functions(self) csr_funcs.each do |f| @implemented_functions << f unless @implemented_functions.any? { |i| i.name == f.name } @@ -982,7 +614,7 @@ def find_replace_links(adoc) "%%LINK%csr_field;#{csr_name}.#{field_name};#{csr_name}.#{field_name}%%" elsif !csr.nil? "%%LINK%csr;#{csr_name};#{csr_name}%%" - elsif inst(name) + elsif instruction(name) "%%LINK%inst;#{name};#{name}%%" elsif extension(name) "%%LINK%ext;#{name};#{name}%%" @@ -1067,32 +699,11 @@ def implemented_interrupt_codes end private :erb_env - # create a new raw *unconfigured* architecture defintion data structure - # - # The data will not include anything configuration-dependent such as implemented_*/mandatory_*/etc. - # - # This function can be used to create a new arch_def for a different configuration - # - # @return [Hash] A unconfigured architecture definition - def unconfigured_data - { - "type" => "partially configured", - "instructions" => instructions.map(&:data), - "extensions" => extensions.map.map(&:data), - "csrs" => csrs.map(&:data), - "profile_classes" => profile_classes.map { |f| [f.name, f.data] }.to_h, - "profile_releases" => profile_releases.map { |p| [p.name, p.data] }.to_h, - "manuals" => manuals.map { |m| [m.name, m.data] }.to_h, - "certificate_classes" => cert_classes.map(&:data), - "certificate_models" => cert_models.map(&:data) - } - end - # passes _erb_template_ through ERB within the content of this config # # @param erb_template [String] ERB source # @return [String] The rendered text - def render_erb(erb_template, what='') + def render_erb(erb_template, what = "") t = Tempfile.new("template") t.write erb_template t.flush @@ -1107,29 +718,3 @@ def render_erb(erb_template, what='') end end end - -# a synchroncous exception code -class ExceptionCode - # @return [String] Long-form display name (can include special characters) - attr_reader :name - - # @return [String] Field name for an IDL enum - attr_reader :var - - # @return [Integer] Code, written into *mcause - attr_reader :num - - # @return [Extension] Extension that defines this code - attr_reader :ext - - def initialize(name, var, number, ext) - @name = name - @name.freeze - @var = var - @num = number - @ext = ext - end -end - -# all the same informatin as ExceptinCode, but for interrupts -InterruptCode = Class.new(ExceptionCode) diff --git a/lib/arch_obj_models/certificate.rb b/lib/arch_obj_models/certificate.rb index 5b8f0552a..48200775b 100644 --- a/lib/arch_obj_models/certificate.rb +++ b/lib/arch_obj_models/certificate.rb @@ -10,12 +10,6 @@ # Holds information from certificate class YAML file. # The inherited "data" member is the database of extensions, instructions, CSRs, etc. class CertClass < PortfolioClass - # @param data [Hash] The data from YAML - # @param arch_def [ArchDef] Architecture spec - def initialize(data, arch_def) - super(data, arch_def) - end - def mandatory_priv_modes = @data["mandatory_priv_modes"] end @@ -26,12 +20,6 @@ def mandatory_priv_modes = @data["mandatory_priv_modes"] # Holds information about a certificate model YAML file. 
# The inherited "data" member is the database of extensions, instructions, CSRs, etc. class CertModel < PortfolioInstance - # @param data [Hash] The data from YAML - # @param arch_def [ArchDef] Architecture spec - def initialize(data, arch_def) - super(data, arch_def) - end - def unpriv_isa_manual_revision = @data["unpriv_isa_manual_revision"] def priv_isa_manual_revision = @data["priv_isa_manual_revision"] def debug_manual_revision = @data["debug_manual_revision"] @@ -59,12 +47,14 @@ def cert_class ##################### # Holds extra requirements not associated with extensions or their parameters. - class Requirement < ArchDefObject + class Requirement def initialize(data, arch_def) - super(data) + @data = data @arch_def = arch_def end + def name = @data["name"] + def description = @data["description"] def when = @data["when"] @@ -91,12 +81,14 @@ def when_pretty # Holds a group of Requirement objects to provide a one-level group. # Can't nest RequirementGroup objects to make multi-level group. - class RequirementGroup < ArchDefObject + class RequirementGroup def initialize(data, arch_def) - super(data) + @data = data @arch_def = arch_def end + def name = @data["name"] + def description = @data["description"] def when = @data["when"] @@ -136,4 +128,4 @@ def requirement_groups end @requirement_groups end -end \ No newline at end of file +end diff --git a/lib/arch_obj_models/csr.rb b/lib/arch_obj_models/csr.rb index 77613e8ee..13c846fc0 100644 --- a/lib/arch_obj_models/csr.rb +++ b/lib/arch_obj_models/csr.rb @@ -2,7 +2,6 @@ require_relative "obj" - # CSR definition class Csr < ArchDefObject def ==(other) @@ -118,7 +117,7 @@ def dynamic_length?(arch_def) when "MXLEN" # mxlen can never change at runtime, so if we have it in the config, the length is not dynamic # if we don't have it in the config, we don't know what the length is - return arch_def.mxlen.nil? + arch_def.mxlen.nil? when "SXLEN" # dynamic if either we don't know SXLEN or SXLEN is explicitly mutable [nil, 3264].include?(arch_def.param_values["SXLEN"]) @@ -143,19 +142,6 @@ def min_length(arch_def) end end - # @param arch_def [ArchDef] Architecture definition - # @return [Integer] Largest length of the CSR in any mode - def max_length(arch_def) - case @data["length"] - when "MXLEN", "SXLEN", "VSXLEN" - 64 - when Integer - @data["length"] - else - raise "Unexpected length" - end - end - # @param arch_def [ArchDef] A configuration (can be nil if the lenth is not dependent on a config parameter) # @param effective_xlen [Integer] The effective xlen, needed since some fields change location with XLEN. If the field location is not determined by XLEN, then this parameter can be nil # @return [Integer] Length, in bits, of the CSR, given effective_xlen @@ -360,7 +346,7 @@ def implemented_fields(arch_def) def fields return @fields unless @fields.nil? - @fields = @data["fields"].map { |_field_name, field_data| CsrField.new(self, field_data) } + @fields = @data["fields"].map { |field_name, field_data| CsrField.new(self, field_name, field_data) } end # @return [Array] All known fields of this CSR when XLEN == +effective_xlen+ @@ -557,10 +543,10 @@ def wavedrom_desc(arch_def, effective_xlen, exclude_unimplemented: false, option def exists_in_cfg?(arch_def) if arch_def.fully_configured? (@data["base"].nil? || (arch_def.possible_xlens.include? @data["base"])) && - arch_def.implemented_extensions.any? { |e| defined_by?(e) } + arch_def.transitive_implemented_extensions.any? { |e| defined_by?(e) } else (@data["base"].nil? 
|| (arch_def.possible_xlens.include? @data["base"])) && - arch_def.prohibited_extensions.none? { |e| defined_by?(e) } + arch_def.prohibited_extensions.none? { |ext_req| ext_req.satisfying_versions.any? { |e| defined_by?(e) } } end end @@ -571,7 +557,7 @@ def optional_in_cfg?(arch_def) exists_in_cfg?(arch_def) && arch_def.mandatory_extensions.all? do |ext_req| - ext_req.satisfying_versions(arch_def).none? do |ext_ver| + ext_req.satisfying_versions.none? do |ext_ver| defined_by?(ext_ver) end end diff --git a/lib/arch_obj_models/csr_field.rb b/lib/arch_obj_models/csr_field.rb index 55cb7b9d8..258e9e4ea 100644 --- a/lib/arch_obj_models/csr_field.rb +++ b/lib/arch_obj_models/csr_field.rb @@ -23,8 +23,9 @@ def base # @param parent_csr [Csr] The Csr that defined this field # @param field_data [Hash] Field data from the arch spec - def initialize(parent_csr, field_data) - super(field_data) + def initialize(parent_csr, field_name, field_data) + super(field_data, parent_csr.data_path, arch: parent_csr.arch) + @name = field_name @parent = parent_csr end @@ -35,13 +36,13 @@ def exists_in_cfg?(arch_def) if arch_def.fully_configured? parent.exists_in_cfg?(arch_def) && (@data["base"].nil? || arch_def.possible_xlens.include?(@data["base"])) && - (@data["definedBy"].nil? || arch_def.implemented_extensions.any? { |ext_ver| defined_by?(ext_ver) }) + (@data["definedBy"].nil? || arch_def.transitive_implemented_extensions.any? { |ext_ver| defined_by?(ext_ver) }) else raise "unexpected type" unless arch_def.partially_configured? parent.exists_in_cfg?(arch_def) && (@data["base"].nil? || arch_def.possible_xlens.include?(@data["base"])) && - (@data["definedBy"].nil? || arch_def.prohibited_extensions.none? { |ext_ver| defined_by?(ext_ver) }) + (@data["definedBy"].nil? || arch_def.prohibited_extensions.none? { |ext_req| ext_req.satisfying_versions.any? { |ext_ver| defined_by?(ext_ver) } }) end end @@ -55,7 +56,7 @@ def optional_in_cfg?(arch_def) parent.optional_in_cfg?(arch_def) else arch_def.mandatory_extensions.all? do |ext_req| - ext_req.satisfying_versions(arch_def).none? do |ext_ver| + ext_req.satisfying_versions.none? do |ext_ver| defined_by?(ext_ver) end end @@ -174,8 +175,6 @@ def type(symtab) idl = @data["type()"] raise "type() is nil for #{csr.name}.#{name} #{@data}?" if idl.nil? 
- - # value_result = Idl::AstNode.value_try do ast = type_checked_type_ast(symtab) begin diff --git a/lib/arch_obj_models/exception_code.rb b/lib/arch_obj_models/exception_code.rb new file mode 100644 index 000000000..633230315 --- /dev/null +++ b/lib/arch_obj_models/exception_code.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +# a synchroncous exception code +class ExceptionCode + # @return [String] Long-form display name (can include special characters) + attr_reader :name + + # @return [String] Field name for an IDL enum + attr_reader :var + + # @return [Integer] Code, written into *mcause + attr_reader :num + + # @return [Extension] Extension that defines this code + attr_reader :ext + + def initialize(name, var, number, ext) + @name = name + @name.freeze + @var = var + @num = number + @ext = ext + end +end + +# all the same informatin as ExceptinCode, but for interrupts +InterruptCode = Class.new(ExceptionCode) diff --git a/lib/arch_obj_models/extension.rb b/lib/arch_obj_models/extension.rb index b40aff3c5..d14319296 100644 --- a/lib/arch_obj_models/extension.rb +++ b/lib/arch_obj_models/extension.rb @@ -2,11 +2,12 @@ require_relative "obj" require_relative "schema" +require_relative "../version" # A parameter (AKA option, AKA implementation-defined value) supported by an extension class ExtensionParameter # @return [ArchDef] The defining Arch def - attr_reader :archdef + attr_reader :arch_def # @return [String] Parameter name attr_reader :name @@ -36,7 +37,7 @@ def schema_type end def initialize(ext, name, data) - @archdef = ext.arch_def + @arch_def = ext.arch_def @data = data @name = name @desc = data["description"] @@ -45,8 +46,9 @@ def initialize(ext, name, data) also_defined_in = [] unless data["also_defined_in"].nil? if data["also_defined_in"].is_a?(String) - other_ext = @archdef.extension(data["also_defined_in"]) + other_ext = @arch_def.extension(data["also_defined_in"]) raise "Definition error in #{ext.name}.#{name}: #{data['also_defined_in']} is not a known extension" if other_ext.nil? + also_defined_in << other_ext else unless data["also_defined_in"].is_a?(Array) && data["also_defined_in"].all? { |e| e.is_a?(String) } @@ -54,8 +56,9 @@ def initialize(ext, name, data) end data["also_defined_in"].each do |other_ext_name| - other_ext = @archdef.extension(other_ext_name) + other_ext = @arch_def.extension(other_ext_name) raise "Definition error in #{ext.name}.#{name}: #{data['also_defined_in']} is not a known extension" if other_ext.nil? + also_defined_in << other_ext end end @@ -64,10 +67,15 @@ def initialize(ext, name, data) @idl_type = @schema.to_idl_type.make_const.freeze end + # @param version [ExtensionVersion] + # @return [Boolean] if this parameter is defined in +version+ def defined_in_extension_version?(version) + return false if @exts.none? { |ext| ext.name == version.ext.name } return true if @data.dig("when", "version").nil? - Gem::Requirement.new(@data["when"]["version"]).satisfied_by?(Gem::Version.new(version)) + @exts.any? do |ext| + ExtensionRequirement.new(ext.name, @data["when"]["version"], arch_def: ext.arch_def).satisfied_by?(version) + end end # @return [String] @@ -77,8 +85,8 @@ def name_potentially_with_link(exts) if exts.size == 1 "<>" - else - "#{name}" + else + name end end @@ -136,13 +144,15 @@ def doc_license @data["doc_license"] end - # @return [Array] versions hash from config + # @return [Array] versions hash from config, sorted by version number def versions return @versions unless @versions.nil? 
@versions = @data["versions"].map do |v| ExtensionVersion.new(name, v["version"], arch_def) end + @versions.sort! + @versions end # @return [Array] Ratified versions hash from config @@ -152,12 +162,12 @@ def ratified_versions # @return [ExtensionVersion] Mimumum defined version of this extension def min_version - versions.min { |a, b| a.version <=> b.version } + versions.min { |a, b| a.version_spec <=> b.version_spec } end # @return [ExtensionVersion] Maximum defined version of this extension def max_version - versions.max { |a, b| a.version <=> b.version } + versions.max { |a, b| a.version_spec <=> b.version_spec } end # @return [ExtensionVersion] Mimumum defined ratified version of this extension @@ -165,7 +175,7 @@ def max_version def min_ratified_version return nil if ratified_versions.empty? - ratified_versions.min { |a, b| a.version <=> b.version } + ratified_versions.min { |a, b| a.version_spec <=> b.version_spec } end # @return [Array] List of parameters added by this extension @@ -181,25 +191,39 @@ def params @params end - # @param ext_data [Hash] The extension data from the architecture spec - # @param arch_def [ArchDef] The architecture definition - def initialize(ext_data, arch_def) - super(ext_data) - @arch_def = arch_def - end - # @param version_requirement [String] Version requirement # @return [Array] Array of extensions implied by any version of this extension meeting version_requirement - def implies(version_requirement = ">= 0") - return [] unless Gem::Requirement.new(version_requirement).satisfied_by?(max_version.version) + def implies(version_requirement = nil) + if version_requirement.nil? + return [] unless ExtensionRequirement.new(@new, arch_def: @arch_def).satisfied_by?(max_version.version) + else + return [] unless ExtensionRequirement.new(@new, version_requirement, arch_def: @arch_def).satisfied_by?(max_version.version) + end max_version.implications end + # @return [Array] List of conflicting extension requirements def conflicts return [] if @data["conflicts"].nil? - to_extension_requirement_list(@data["conflicts"]) + if @data["conflicts"].is_a?(String) + [ExtensionRequirement.new(@data["conflicts"], arch_def: @arch_def)] + elsif @data["conflicts"].is_a?(Hash) + [ExtensionRequirement.new(@data["conflicts"]["name"], @data["conflicts"]["version"], arch_def: @arch_def)] + elsif @data["conflicts"].is_a?(Array) + @data["conflicts"].map do |conflict| + if conflict.is_a?(String) + ExtensionRequirement.new(conflict, arch_def: @arch_def) + elsif conflict.is_a?(Array) + ExtensionRequirement.new(conflict["name"], conflict["version"], arch_def: @arch_def) + else + raise "Invalid conflicts data: #{conflict.inspect}" + end + end + else + raise "Invalid conflicts data: #{@data["conflicts"].inspect}" + end end # @return [Array] the list of instructions implemented by *any version* of this extension (may be empty) @@ -216,28 +240,6 @@ def csrs @csrs = arch_def.csrs.select { |csr| versions.any? { |v| csr.defined_by?(v) } } end - # @return [Array] the list of CSRs implemented by this extension (may be empty) - def implemented_csrs(archdef) - raise "should only be called with a fully configured arch def" unless archdef.fully_configured? - - return @implemented_csrs unless @implemented_csrs.nil? - - @implemented_csrs = archdef.implemented_csrs.select do |csr| - versions.any? 
{ |ver| csr.defined_by?(ExtensionVersion.new(name, ver["version"], @arch_def)) } - end - end - - # @return [Array] the list of CSRs implemented by this extension (may be empty) - def implemented_instructions(archdef) - raise "should only be called with a fully configured arch def" unless archdef.fully_configured? - - return @implemented_instructions unless @implemented_instructions.nil? - - @implemented_instructions = archdef.implemented_instructions.select do |inst| - versions.any? { |ver| inst.defined_by?(ExtensionVersion.new(name, ver["version"], @arch_def)) } - end - end - # return the set of reachable functions from any of this extensions's CSRs or instructions in the given evaluation context # # @param symtab [Idl::SymbolTable] The evaluation context @@ -262,22 +264,6 @@ def reachable_functions(symtab) @reachable_functions[symtab] = funcs.uniq end - - # @return [Array] Array of IDL functions reachable from any instruction or CSR in the extension, irrespective of a specific evaluation context - def reachable_functions_unevaluated - return @reachable_functions_unevaluated unless @reachable_functions_unevaluated.nil? - - funcs = [] - instructions.each do |inst| - funcs += inst.operation_ast(arch_def.symtab).reachable_functions(arch_def.symtab) - end - - csrs.each do |csr| - funcs += csr.reachable_functions(arch_def) - end - - @reachable_functions_unevaluated = funcs.uniq(&:name) - end end # A specific version of an extension @@ -285,26 +271,77 @@ class ExtensionVersion # @return [String] Name of the extension attr_reader :name - # @return [Gem::Version] Version of the extension - attr_reader :version - # @return [Extension] Extension attr_reader :ext + # @return [VersionSpec] + attr_reader :version_spec + + # @return [String] + attr_reader :version_str + # @param name [#to_s] The extension name - # @param version [Integer,String] The version specifier + # @param version [String] The version specifier # @param arch_def [ArchDef] The architecture definition - def initialize(name, version, arch_def) + def initialize(name, version_str, arch_def, fail_if_version_does_not_exist: false) @name = name.to_s - @version = Gem::Version.new(version) + @version_str = version_str + @version_spec = VersionSpec.new(version_str) + + raise ArgumentError, "Must supply arch" if arch_def.nil? + @arch_def = arch_def - unless arch_def.nil? - @ext = arch_def.extension(@name) - raise "Extension #{name} not found in arch def" if @ext.nil? - @data = @ext.data["versions"].find { |v| v["version"] == version.to_s } - raise "Extension #{name} version #{version} not found in arch def" if @data.nil? + @ext = @arch_def.extension(@name) + raise "Extension #{name} not found in arch def" if @ext.nil? + + @data = @ext.data["versions"].find { |v| VersionSpec.new(v["version"]) == @version_spec } + + if fail_if_version_does_not_exist && @data.nil? + raise ArgumentError, "#{@name}, Version #{version_str} is not defined" + elsif @data.nil? + warn "#{@name}, Version #{version_str} is not defined" + end + end + + # @return [Array] List of known ExtensionVersions that are compatible with this ExtensionVersion (i.e., have larger version number and are not breaking) + def compatible_versions + return @compatible_versions unless @compatible_versions.nil? + + @compatible_versions = [] + @ext.versions.each do |v| + @compatible_versions << v if v.version_spec >= @version_spec + break if @compatible_versions.size.positive? && v.breaking? end + raise "Didn't even find self?" if compatible_versions.empty? 
+ + @compatible_versions + end + + # @param other [ExtensionVersion] + # @return [Boolean] Whether or not +other+ is compatible with self + def compatible?(other) = compatible_versions.include?(other) + + # @return [Boolean] Whether or not this is a breaking version (i.e., incompatible with all prior versions) + def breaking? + !@data["breaking"].nil? + end + + # @return [String] Canonical version string + def canonical_version = @version_spec.canonical + + # @param other [ExtensionVersion] An extension name and version + # @return [Boolean] whether or not this ExtensionVersion has the exact same name and version as other + def eql?(other) + raise "ExtensionVersion is not comparable to #{other.class}" unless other.is_a?(ExtensionVersion) + + @ext.name == other.ext.name && @version_spec.eql?(other.version_spec) + end + + # @param other [ExtensionVersion] An extension name and version + # @return [Boolean] whether or not this ExtensionVersion has the exact same name and version as other + def ==(other) + eql?(other) end # @return [String] The state of the extension version ('ratified', 'developemnt', etc) @@ -316,6 +353,7 @@ def changes = @data["changes"].nil? ? [] : @data["changes"] def url = @data["url"] + # @return [Array] List of contributors to this extension version def contributors return @contributors unless @contributors.nil? @@ -328,57 +366,70 @@ def contributors # @return [Array] The list of parameters for this extension version def params - @ext.params.select { |p| p.defined_in_extension_version?(@version) } + @ext.params.select { |p| p.defined_in_extension_version?(self) } end - def to_s - "#{name}@#{version}" + # @return [String] formatted like the RVI manual + # + # @example + # ExtensionVersion.new("A", "2.2").to_rvi_s #=> "A2p2" + def to_rvi_s + "#{name}#{@version_spec.to_rvi_s}" end - # @overload ==(other) - # @param other [String] An extension name - # @return [Boolean] whether or not this ExtensionVersion is named 'other' - # @overload ==(other) - # @param other [ExtensionVersion] An extension name and version - # @return [Boolean] whether or not this ExtensionVersion has the exact same name and version as other - def ==(other) - case other - when String - @name == other - when ExtensionVersion - @name == other.name && @version == other.version - else - raise "Unexpected comparison" - end + # @return [String] Ext@Version + def to_s + "#{name}@#{@version_spec.canonical}" + end + + # @return [SchemaCondition] Condition that must be met for this version to be allowed. + # Transitively includes any requirements from an implied extension. + def requirement_condition + @requirement_condition ||= + begin + r = case @data["requires"] + when nil + AlwaysTrueSchemaCondition.new + when Hash + SchemaCondition.new(@data["requires"], @arch_def) + else + SchemaCondition.new({ "oneOf" => [@data["requires"]] }, @arch_def) + end + if @data.key?("implies") + rs = [r] + implications.map(&:requirement_condition) + rs = rs.reject(&:empty?) + r = SchemaCondition.all_of(*rs.map(&:to_h)) unless rs.empty? 
+ end + r + end end - # @param other [ExtensionVersion] Comparison - # @return [Boolean] Whether or not +other+ is an ExtensionVersion with the same name and version - def eql?(other) - return false unless other.is_a?(ExtensionVersion) - - @name == other.name && @version == other.version + # @return [Array] List of extensions that conflict with this ExtensionVersion + # The list is *not* transitive; if conflict C1 implies C2, + # only C1 shows up in the list + def conflicts + @conflicts ||= extension.conflicts.map(&:satisfying_versions).flatten.uniq.sort end - def requirements - r = case @data["requires"] - when nil - AlwaysTrueSchemaCondition.new - when Hash - SchemaCondition.new(@data["requires"]) - else - SchemaCondition.new({"oneOf" => [@data["requires"]]}) - end - if @data.key?("implies") - rs = [r] + implications.map { |e| e.requirements } - rs = rs.reject { |r| r.empty? } - unless rs.empty? - r = SchemaCondition.all_of(*rs.map { |r| r.to_h }) - end + # @return [Array] List of extensions that conflict with this ExtensionVersion + # The list *is* transitive; if conflict C1 implies C2, + # both C1 and C2 show up in the list + def transitive_conflicts + return @transitive_conflicts unless @transive_conflicts.nil? + + @transitive_conflicts = [] + conflicts.each do |c| + @transitive_conflicts << c + @transitive_conflicts.concat(c.transitive_implications) end - r + @transitive_conflicts.uniq! + @transitive_conflicts.sort! + @transitive_conflicts end + # @return [Array] List of extension versions that are implied by with this ExtensionVersion + # This list is *not* transitive; if an implication I1 implies another extension I2, + # only I1 shows up in the list def implications return @implications unless @implications.nil? @@ -388,52 +439,83 @@ def implications return @implications when Array if @data["implies"][0].is_a?(Array) - @implications += @data["implies"].map { |e| ExtensionVersion.new(e[0], e[1], @arch_def) } + @implications.concat(@data["implies"].map { |e| ExtensionVersion.new(e[0], e[1], @arch_def) }) else @implications << ExtensionVersion.new(@data["implies"][0], @data["implies"][1], @arch_def) end end - @implications.uniq! + @implications.sort! @implications end + # @return [Array] List of extension versions that are implied by with this ExtensionVersion + # This list is transitive; if an implication I1 implies another extension I2, + # both I1 and I2 are in the returned list + def transitive_implications + return @transitive_implications unless @transitive_implications.nil? + + @transitive_implications = [] + case @data["implies"] + when nil + return @transitive_implications + when Array + if @data["implies"][0].is_a?(Array) + impls = @data["implies"].map { |e| ExtensionVersion.new(e[0], e[1], @arch_def) } + @transitive_implications.concat(impls) + impls.each do |i| + transitive_impls = i.implications + @transitive_implications.concat(transitive_impls) unless transitive_impls.empty? + end + else + impl = ExtensionVersion.new(@data["implies"][0], @data["implies"][1], @arch_def) + @transitive_implications << impl + transitive_impls = impl.implications + @transitive_implications.concat(transitive_impls) unless transitive_impls.empty? + end + end + @transitive_implications.uniq! + @transitive_implications.sort! 
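# Sketch of how requirement_condition above folds the per-version "requires"
# data into a SchemaCondition (extension names illustrative):
#
#   requires: Zicsr                    -> wrapped as { "oneOf" => ["Zicsr"] }
#   requires: { anyOf: [Zba, Zbb] }    -> used as the composition hash directly
#   (no "requires" key)                -> AlwaysTrueSchemaCondition
#
# When the version also lists "implies", the implied versions' own requirement
# conditions are AND-ed in through SchemaCondition.all_of.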
+ @transitive_implications + end + # @param ext_name [String] Extension name - # @param ext_version_requirements [Number,String,Array] Extension version requirements, taking the same inputs as Gem::Requirement - # @see https://docs.ruby-lang.org/en/3.0/Gem/Requirement.html#method-c-new Gem::Requirement#new + # @param ext_version_requirements [String,Array] Extension version requirements # @return [Boolean] whether or not this ExtensionVersion is named `ext_name` and satifies the version requirements def satisfies?(ext_name, *ext_version_requirements) - @name == ext_name && Gem::Requirement.new(ext_version_requirements).satisfied_by?(@version) + ExtensionRequirement.new(ext_name, ext_version_requirements).satisfied_by?(self) end # sorts extension by name, then by version def <=>(other) - raise ArgumentError, "ExtensionVersions are only comparable to other extension versions" unless other.is_a?(ExtensionVersion) + unless other.is_a?(ExtensionVersion) + raise ArgumentError, "ExtensionVersions are only comparable to other extension versions" + end if other.name != @name @name <=> other.name else - @version <=> other.version + @version_spec <=> other.version_spec end end # @return [Array] the list of CSRs implemented by this extension version (may be empty) - def implemented_csrs(archdef) - raise "should only be called with a fully configured arch def" unless archdef.fully_configured? - + def implemented_csrs return @implemented_csrs unless @implemented_csrs.nil? - @implemented_csrs = archdef.implemented_csrs.select do |csr| + raise "implemented_csrs needs an arch_def" if @arch_def.nil? + + @implemented_csrs = @arch_def.csrs.select do |csr| csr.defined_by?(self) end end - # @return [Array] the list of CSRs implemented by this extension version (may be empty) - def implemented_instructions(archdef) - raise "should only be called with a fully configured arch def" unless archdef.fully_configured? - + # @return [Array] the list of insts implemented by this extension version (may be empty) + def implemented_instructions return @implemented_instructions unless @implemented_instructions.nil? - @implemented_instructions = archdef.implemented_instructions.select do |inst| + raise "implemented_instructions needs an arch_def" if @arch_def.nil? + + @implemented_instructions = @arch_def.instructions.select do |inst| inst.defined_by?(self) end end @@ -555,41 +637,63 @@ def <=>(other) class ExtensionRequirement # @return [String] Extension name attr_reader :name - attr_reader :note # Optional note. Can be nil. - attr_reader :req_id # Optional Requirement ID. Can be nil. - attr_reader :presence # Optional presence (e.g., mandatory, optional, etc.). Can be nil. - # @return [Gem::Requirement] Version requirement - def version_requirement - @requirement - end + # @return [String,nil] Optional note + attr_reader :note + + # @return [String,nil] Optional Requirement ID. + attr_reader :req_id + + # @return [String,nil], Optional presence (e.g., mandatory, optional, etc.) + attr_reader :presence + + # @return [Array] Set of requirement specifications + def requirement_specs = @requirements def to_s - "#{name} #{@requirement}" + "#{name} #{@requirements.map(&:to_s).join(', ')}" + end + + # @return [Extension] The extension that this requirement is for + def extension + return @extension unless @extension.nil? + + raise "Cannot get extension; arch_def was not initialized" if @arch_def.nil? 
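# Sketch of the "implies" shapes handled by #implications above; names and
# versions are illustrative:
#
#   implies: [Zca, "1.0.0"]                      # one implied extension version
#   implies: [[Zca, "1.0.0"], [Zcb, "1.0.0"]]    # several implied extension versions
#
# #implications returns only the directly implied versions, while
# #transitive_implications also folds in what those implications themselves imply.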
+ + @extension = @arch_def.extension(@name) end # @param name [#to_s] Extension name - # @param requirements (see Gem::Requirement#new) - def initialize(name, *requirements, note: nil, req_id: nil, presence: nil) - @name = name.to_s + # @param requirements [String] Single requirement + # @param requirements [Array] List of requirements, all of which must hold + def initialize(name, *requirements, arch_def: nil, note: nil, req_id: nil, presence: nil) + raise ArgumentError, "Arch is required" if arch_def.nil? + + @name = name.to_s.freeze + @arch_def = arch_def + @ext = @arch_def.extension(@name) + + raise ArgumentError, "Could not find extension named '#{@name}'" if @ext.nil? + requirements = if requirements.empty? - [">= 0"] + ["~> #{@ext.min_version.version_str}"] else requirements end - @requirement = Gem::Requirement.new(requirements) - @note = note - @req_id = req_id - @presence = presence + @requirements = requirements.map { |r| RequirementSpec.new(r) } + + @note = note.freeze + @req_id = req_id.freeze + @presence = presence.freeze end # @return [Array] The list of extension versions that satisfy this requirement - def satisfying_versions(archdef) - ext = archdef.extension(@name) + def satisfying_versions + ext = @arch_def.extension(@name) return [] if ext.nil? - ext.versions.select { |v| @requirement.satisfied_by?(v.version) } + ext.versions.select { |v| @requirements.all? { |r| r.satisfied_by?(v.version_spec, ext) } } end # @overload @@ -605,11 +709,16 @@ def satisfying_versions(archdef) def satisfied_by?(*args) if args.size == 1 if args[0].is_a?(ExtensionVersion) - args[0].name == @name && - @requirement.satisfied_by?(Gem::Version.new(args[0].version)) + return false if args[0].name != @name + + @requirements.all? { |r| r.satisfied_by?(args[0].version_spec, @ext) } elsif args[0].is_a?(ExtensionRequirement) - satisfying_versions.all? do |ext_ver| - satified_by?(ext_ver) + return false if args[0].name != @name + + @requirements.all? do |r| + args[0].satisfying_versions.all? do |ext_ver| + r.satisfied_by?(ext_ver.version_spec, @ext) + end end else raise ArgumentError, "Single argument must be an ExtensionVersion or ExtensionRquirement" @@ -618,19 +727,18 @@ def satisfied_by?(*args) raise ArgumentError, "First parameter must be an extension name" unless args[0].respond_to?(:to_s) raise ArgumentError, "First parameter must be an extension version" unless args[1].respond_to?(:to_s) - args[0] == @name && - @requirement.satisfied_by?(Gem::Version.new(args[1])) + return false if args[0] != @name + + @requirements.all? { |r| r.satisfied_by?(args[1], @ext) } else raise ArgumentError, "Wrong number of args (expecting 1 or 2)" end end # @return [Array] List of CSRs defined by any extension satisfying this requirement - def csrs(arch_def) - return @csrs unless @csrs.nil? - - @csrs = arch_def.csrs.select do |csr| - satisfying_versions(arch_def).any? do |ext_ver| + def csrs + @csrs ||= @arch_def.csrs.select do |csr| + satisfying_versions.any? 
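# Rough usage sketch for the reworked ExtensionRequirement: an arch_def is now
# mandatory, and an omitted version requirement defaults to "~> <minimum defined
# version>" of the extension. Names and versions below are illustrative.
#
#   req = ExtensionRequirement.new("C", ">= 2.0", arch_def: arch_def)
#   req.satisfying_versions           #=> defined ExtensionVersions of C matching ">= 2.0"
#   req.satisfied_by?("C", "2.0.0")   #=> true/false, per the requirement specs
#
#   ExtensionRequirement.new("C")     # raises ArgumentError ("Arch is required")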
do |ext_ver| csr.defined_by?(ext_ver) end end diff --git a/lib/arch_obj_models/instruction.rb b/lib/arch_obj_models/instruction.rb index 8daaa54be..df5e6c6fb 100644 --- a/lib/arch_obj_models/instruction.rb +++ b/lib/arch_obj_models/instruction.rb @@ -7,6 +7,58 @@ # model of a specific instruction in a specific base (RV32/RV64) class Instruction < ArchDefObject + def self.ary_from_location(location_str_or_int) + return [location_str_or_int] if location_str_or_int.is_a?(Integer) + + bits = [] + parts = location_str_or_int.split("|") + parts.each do |part| + if part.include?("-") + msb, lsb = part.split("-").map(&:to_i) + (lsb..msb).each { |i| bits << i } + else + bits << part.to_i + end + end + bits + end + + def self.validate_encoding(encoding, inst_name) + match = encoding["match"] + raise "No match for instruction #{inst_name}?" if match.nil? + + variables = encoding.key?("variables") ? encoding["variables"] : [] + match.size.times do |i| + if match[match.size - 1 - i] == "-" + # make sure exactly one variable covers this bit + vars_match = variables.count { |variable| ary_from_location(variable["location"]).include?(i) } + if vars_match.zero? + raise ValidationError, "In instruction #{inst_name}, no variable or encoding bit covers bit #{i}" + elsif vars_match != 1 + raise ValidationError, "In instruction, #{inst_name}, bit #{i} is covered by more than one variable" + end + else + # make sure no variable covers this bit + unless variables.nil? + unless variables.none? { |variable| ary_from_location(variable["location"]).include?(i) } + raise ValidationError, "In instruction, #{inst_name}, bit #{i} is covered by both a variable and the match string" + end + end + end + end + end + + def validate + super + + if @data["encoding"]["RV32"].nil? + Instruction.validate_encoding(@data["encoding"], name) + else + Instruction.validate_encoding(@data["encoding"]["RV32"], name) + Instruction.validate_encoding(@data["encoding"]["RV64"], name) + end + end + def ==(other) if other.is_a?(Instruction) name == other.name @@ -211,14 +263,6 @@ def reachable_exceptions_str(symtab, effective_xlen=nil) end end - # @return [ArchDef] The architecture definition - attr_reader :arch_def - - def initialize(data, arch_def) - super(data) - @arch_def = arch_def - end - # represents a single contiguous instruction encoding field # Multiple EncodingFields may make up a single DecodeField, e.g., when an immediate # is split across multiple locations @@ -603,7 +647,7 @@ def operation_ast(symtab) @operation_ast = symtab.archdef.idl_compiler.compile_inst_operation( self, symtab:, - input_file: @data["__source"], + input_file: @data["$source"], input_line: source_line("operation()") ) @@ -675,23 +719,22 @@ def rv64? def excluded_by?(*args) return false if @data["excludedBy"].nil? - excluded_by = SchemaCondition.new(@data["excludedBy"]) + excluded_by = SchemaCondition.new(@data["excludedBy"], @arch_def) - if args.size == 1 - raise ArgumentError, "Parameter must be an ExtensionVersion" unless args[0].is_a?(ExtensionVersion) + ext_ver = + if args.size == 1 + raise ArgumentError, "Parameter must be an ExtensionVersion" unless args[0].is_a?(ExtensionVersion) - excluded_by.satisfied_by? 
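# Sketch of Instruction.ary_from_location above, which expands an encoding
# variable's location into the individual bit positions it covers:
#
#   Instruction.ary_from_location(20)            #=> [20]
#   Instruction.ary_from_location("11-7")        #=> [7, 8, 9, 10, 11]
#   Instruction.ary_from_location("31-25|11-7")  #=> [25, 26, 27, 28, 29, 30, 31, 7, 8, 9, 10, 11]
#
# validate_encoding relies on this to check that every "-" bit in the match
# string is covered by exactly one variable and every fixed bit by none.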
do |r| - r.name == args[0].name && r.version_requirement.satisfied_by?(args[0].version) - end - elsif args.size == 2 - raise ArgumentError, "First parameter must be an extension name" unless args[0].respond_to?(:to_s) - raise ArgumentError, "Second parameter must be an extension version" unless args[0].respond_to?(:to_s) - - version = args[1].is_a?(Gem::Version) ? args[1] : Gem::Version.new(args[1]) + args[0] + elsif args.size == 2 + raise ArgumentError, "First parameter must be an extension name" unless args[0].respond_to?(:to_s) + raise ArgumentError, "Second parameter must be an extension version" unless args[1].respond_to?(:to_s) - excluded_by.satisfied_by? do |r| - r.name == args[0] && r.version_requirement.satisfied_by?(version) + ExtensionVersion.new(args[0], args[1], @arch_def) end + + excluded_by.satisfied_by? do |r| + r.satisfied_by?(ext_ver) end end diff --git a/lib/arch_obj_models/manual.rb b/lib/arch_obj_models/manual.rb index 86a99576a..44d05d1df 100644 --- a/lib/arch_obj_models/manual.rb +++ b/lib/arch_obj_models/manual.rb @@ -5,20 +5,15 @@ require_relative "obj" class Manual < ArchDefObject - def initialize(data, arch_def) - super(data) - @arch_def = arch_def - end - def versions return @versions unless @versions.nil? - @versions = [] - @data["versions"].each do |version| - @versions << ManualVersion.new(version, self, @arch_def) - end - - @versions + @versions = + if @arch_def.nil? + @specification.manual_versions.select { |mv| mv.manual == self } + else + @arch_def.manual_versions.select { |mv| mv.manual == self } + end end def version(name) @@ -27,6 +22,12 @@ def version(name) # @return [String] The title of the manual, as used by marketing def marketing_name = @data["marketing_name"] + + # for manuals that reference an external repo, set the url to that repo data (file path) + def repo_path=(path) + @repo_path = Pathname.new(path) + versions.each { |v| v.repo_path = @repo_path } + end end class ManualChapter @@ -34,23 +35,30 @@ def initialize(volume, path) @volume = volume @version = volume.version - fullpath = "#{@version.path}/#{path}" - raise "Path '#{fullpath}' does not exist" unless File.exist?(fullpath) - - @path = fullpath + @path = Pathname.new path end def name - File.basename(@path, ".adoc") + @path.basename(".adoc").to_s end def title return @title unless @title.nil? - @title = (Asciidoctor.load File.read(path).scrub).doctitle.encode("US-ASCII") + @title = (Asciidoctor.load File.read(fullpath).scrub).doctitle.encode("US-ASCII") end - # @return [String] The absolute path to the chapter + def fullpath + raise "Must call repo_path= first" if @repo_path.nil? + + @repo_path / @path + end + + def repo_path=(path) + @repo_path = path + end + + # @return [Pathname] The relative path to the chapter attr_reader :path end @@ -58,7 +66,6 @@ class ManualVolume # @return [ManualVersion] The version this volume belongs to attr_reader :version - # @return [ArchDef] The architecture definition def arch_def = version.arch_def def initialize(data, version) @@ -97,28 +104,37 @@ def extensions next end - unless ext_obj.versions.any? { |v| v.version == ext[1] } + ext_ver = ExtensionVersion.new(ext[0], ext[1], arch_def) + unless ext_obj.versions.any? 
{ |known_ver| known_ver == ext_ver } warn "Extension '#{ext[0]}', version '#{ext[1]}' is not defined in the database" next end - @extensions << ExtensionVersion.new(ext[0], ext[1], arch_def) + @extensions << ext_ver end @extensions end + + def repo_path=(path) + @repo_path = path + chapters.each { |c| c.repo_path = path } + end end class ManualVersion < ArchDefObject # @return [Manual] The manual this version belongs to - attr_reader :manual - - # @return [ArchDef] The architecture definition - attr_reader :arch_def + def manual + return @manual unless @manual.nil? + + @manual = + if @arch_def.nil? + @specification.ref(@data["manual"]["$ref"]) + else + @arch_def.ref(@data["manual"]["$ref"]) + end + raise "Error: manual #{@data['manual']['$ref']} is not found" if @manual.nil? - def initialize(data, manual, arch_def) - super(data) - @manual = manual - @arch_def = arch_def + @manual end # @return [String] Semantic version number @@ -129,7 +145,7 @@ def marketing_version = @data["marketing_version"] # @return [String] Path to the directory containing contents.yaml file for this version def path - File.dirname(@data["__source"]) + File.dirname(@data["$source"]) end # @return [Boolean] Whether or not this version is using riscv-isa-manual as a source @@ -186,4 +202,9 @@ def csrs end @csrs = @csrs.uniq(&:name) end + + def repo_path=(path) + @repo_path = path + volumes.each { |v| v.repo_path = path } + end end diff --git a/lib/arch_obj_models/obj.rb b/lib/arch_obj_models/obj.rb index 215c4ec47..5f6814c70 100644 --- a/lib/arch_obj_models/obj.rb +++ b/lib/arch_obj_models/obj.rb @@ -25,7 +25,139 @@ # is warranted, e.g., the CSR Field 'alias' returns a CsrFieldAlias object # instead of a simple string class ArchDefObject - attr_reader :data, :name, :long_name, :description + # Exception raised when there is a problem with a schema file + class SchemaError < ::StandardError + # result from JsonSchemer.validate + attr_reader :result + + def initialize(result) + if result.is_a?(Enumerator) + super(result.to_a.map { |e| "At #{e['schema_pointer']}: #{e['type']}" }) + else + super(result["error"]) + end + @result = result + end + end + + # exception raised when an object does not validate against its schema + class SchemaValidationError < ::StandardError + + # result from JsonSchemer.validate + attr_reader :result + + # create a new SchemaValidationError + # + # @param result [JsonSchemer::Result] JsonSchemer result + def initialize(path, result) + msg = "While validating #{path}:\n\n" + nerrors = result.count + msg << "#{nerrors} error(s) during validations\n\n" + result.to_a.each do |r| + msg << + if r["type"] == "required" && !r.dig("details", "missing_keys").nil? 
+ " At '#{r['data_pointer']}': Missing required parameter(s) '#{r['details']['missing_keys']}'\n" + elsif r["type"] == "schema" + if r["schema_pointer"] == "/additionalProperties" + " At #{r['data_pointer']}, there is an unallowed additional key\n" + else + " At #{r['data_pointer']}, endpoint is an invalid key\n" + end + elsif r["type"] == "enum" + " At #{r['data_pointer']}, '#{r['data']}' is not a valid enum value (#{r['schema']['enum']})\n" + elsif r["type"] == "maxProperties" + " Maximum number of properties exceeded\n" + elsif r["type"] == "object" + " At #{r['data_pointer']}, Expecting object, got #{r['data']}\n" + elsif r["type"] == "pattern" + " At #{r['data_pointer']}, RegEx validation failed; '#{r['data']}' does not match '#{r['schema']['pattern']}'\n" + elsif r["type"] == "integer" + " At #{r['data_pointer']}, '#{r['data']}' is not a integer\n" + elsif r["type"] == "array" + " At #{r['data_pointer']}, '#{r['data']}' is not a array\n" + elsif r["type"] == "oneOf" + " At #{r['data_pointer']}, '#{r['data']}' matches more than one of #{r['schema']['oneOf']}\n" + elsif r["type"] == "const" + " At #{r['data_pointer']}, '#{r['data']}' does not match required value '#{r['schema']['const']}'\n" + else + " #{r}\n\n" + end + end + msg << "\n" + # msg << result.to_a.to_s + super(msg) + @result = result + end + end + + attr_reader :data, :data_path, :specification, :arch_def, :name, :long_name, :description + + # @return [Specification] If only a specification (no config) is known + # @return [ArchDef] If a specification and config is known + attr_reader :arch + + def kind = @data["kind"] + + @@schemas ||= {} + @@schema_ref_resolver ||= proc do |pattern| + if pattern.to_s =~ /^http/ + JSON.parse(Net::HTTP.get(pattern)) + else + JSON.load_file($root / "schemas" / pattern.to_s) + end + end + + # validate the data against it's schema + # @raise [SchemaError] if the data is invalid + def validate + schemas = @@schemas + ref_resolver = @@schema_ref_resolver + + if @data.key?("$schema") + schema_path = data["$schema"] + schema_file, obj_path = schema_path.split("#") + schema = + if schemas.key?(schema_file) + schemas[schema_file] + else + schemas[schema_file] = JSONSchemer.schema( + File.read("#{$root}/schemas/#{schema_file}"), + regexp_resolver: "ecma", + ref_resolver:, + insert_property_defaults: true + ) + raise SchemaError, schemas[schema_file].validate_schema unless schemas[schema_file].valid_schema? + + schemas[schema_file] + end + + unless obj_path.nil? + obj_path_parts = obj_path.split("/")[1..] + + obj_path_parts.each do |k| + schema = schema.fetch(k) + end + end + + # convert through JSON to handle anything supported in YAML but not JSON + # (e.g., integer object keys will be coverted to strings) + jsonified_obj = JSON.parse(JSON.generate(@data)) + + raise "Nothing there?" if jsonified_obj.nil? + + raise SchemaValidationError.new(@data_path, schema.validate(jsonified_obj)) unless schema.valid?(jsonified_obj) + else + warn "No $schema for #{@data_path}" + end + end + + # clone this, and set the arch_def at the same time + # @return [ExtensionRequirement] The new object + def clone(arch_def: nil) + obj = super() + obj.instance_variable_set(:@arch_def, arch_def) + obj + end def <=>(other) name <=> other.name @@ -34,7 +166,7 @@ def <=>(other) # @return [String] Source file that data for this object can be attributed to # @return [nil] if the source isn't known def __source - @data["__source"] + @data["$source"] end # The raw content of definedBy in the data. 
@@ -48,14 +180,22 @@ def definedBy end # @param data [Hash] Hash with fields to be added - def initialize(data) + # @param data_path [Pathname] Path to the data file + def initialize(data, data_path, arch: nil) raise "Bad data" unless data.is_a?(Hash) @data = data + @data_path = data_path + if arch.is_a?(ArchDef) + @arch_def = arch + @specification = arch + elsif arch.is_a?(Specification) + @specification = arch + end + @arch = arch @name = data["name"] @long_name = data["long_name"] @description = data["description"] - end def inspect @@ -73,23 +213,6 @@ def keys = @data.keys # @return (see Hash#key?) def key?(k) = @data.key?(k) - # adds accessor functions for any properties in the data - # def method_missing(method_name, *args, &block) - # if @data.key?(method_name.to_s) - # raise "Unexpected argument to '#{method_name}" unless args.empty? - - # raise "Unexpected block given to '#{method_name}" if block_given? - - # @data[method_name.to_s] - # else - # super - # end - # end - - # def respond_to_missing?(method_name, include_private = false) - # @data.key?(method_name.to_s) || super - # end - # @overload defined_by?(ext_name, ext_version) # @param ext_name [#to_s] An extension name # @param ext_version [#to_s] A specific extension version @@ -98,67 +221,44 @@ def key?(k) = @data.key?(k) # @param ext_version [ExtensionVersion] An extension version # @return [Boolean] Whether or not the instruction is defined by ext_version def defined_by?(*args) - if args.size == 1 - raise ArgumentError, "Parameter must be an ExtensionVersion" unless args[0].is_a?(ExtensionVersion) + ext_ver = + if args.size == 1 + raise ArgumentError, "Parameter must be an ExtensionVersion" unless args[0].is_a?(ExtensionVersion) - defined_by.satisfied_by? do |r| - r.name == args[0].name && r.version_requirement.satisfied_by?(args[0].version) - end - elsif args.size == 2 - raise ArgumentError, "First parameter must be an extension name" unless args[0].respond_to?(:to_s) - version = args[1].is_a?(Gem::Version) ? args[1] : Gem::Version.new(args[1]) + args[0] + elsif args.size == 2 + raise ArgumentError, "First parameter must be an extension name" unless args[0].respond_to?(:to_s) + raise ArgumentError, "First parameter must be an extension version" unless args[1].respond_to?(:to_s) - defined_by.satisfied_by? do |r| - r.name == args[0] && r.version_requirement.satisfied_by?(version) + ExtensionVersion.new(args[0], args[1], arch_def) + else + raise ArgumentError, "Unsupported number of arguments (#{args.size})" end - else - raise ArgumentError, "Unsupported number of arguments of " + args.size - end + + defined_by_condition.satisfied_by? { |req| req.satisfied_by?(ext_ver) } end - # def to_extension_requirement(obj) - # if obj.is_a?(String) - # ExtensionRequirement.new(obj, ">= 0") - # else - # ExtensionRequirement.new(*obj) - # end - # end - # private :to_extension_requirement - - # def to_extension_requirement_list(obj) - # list = [] - # if obj.is_a?(Array) - # # could be either a single extension with exclusion, or a list of exclusions - # if extension_exclusion?(obj[0]) - # list << to_extension_requirement(obj[0]) - # else - # # this is a list - # obj.each do |r| - # list << to_extension_exclusion(r) - # end - # end - # else - # list << to_extension_requirement(obj) - # end - # list - # end + # because of multiple ("allOf") conditions, we generally can't return a list of extension versions here.... + # # @return [Array] Extension(s) that define the instruction. If *any* requirement is met, the instruction is defined. 
+ # def defined_by + # raise "ERROR: definedBy is nul for #{name}" if @data["definedBy"].nil? - # def extension_requirement?(obj) - # obj.is_a?(String) && obj =~ /^([A-WY])|([SXZ][a-z]+)$/ || - # obj.is_a?(Array) && obj[0] =~ /^([A-WY])|([SXZ][a-z]+)$/ + # SchemaCondition.new(@data["definedBy"], @arch_def).satisfying_ext_versions # end - # private :extension_requirement? # @return [SchemaCondition] Extension(s) that define the instruction. If *any* requirement is met, the instruction is defined. - def defined_by - raise "ERROR: definedBy is nul for #{name}" if @data["definedBy"].nil? + def defined_by_condition + @defined_by_condition ||= + begin + raise "ERROR: definedBy is nul for #{name}" if @data["definedBy"].nil? - SchemaCondition.new(@data["definedBy"]) + SchemaCondition.new(@data["definedBy"], @arch_def) + end end # @return [String] Name of an extension that "primarily" defines the object (i.e., is the first in a list) def primary_defined_by - defined_by.first_requirement.name + defined_by_condition.first_requirement end # @return [Integer] THe source line number of +path+ in the YAML file @@ -176,8 +276,8 @@ def primary_defined_by def source_line(*path) # find the line number of this operation() in the *original* file - yaml_filename = @data["__source"] - raise "No __source for #{name}" if yaml_filename.nil? + yaml_filename = @data["$source"] + raise "No $source for #{name}" if yaml_filename.nil? line = nil path_idx = 0 Psych.parse_stream(File.read(yaml_filename), filename: yaml_filename) do |doc| @@ -208,7 +308,7 @@ def source_line(*path) end end end - raise "Didn't find key '#{path}' in #{@data['__source']}" + raise "Didn't find key '#{path}' in #{@data['$source']}" end end @@ -241,7 +341,7 @@ def text end # Personal information about a contributor -class Person < ArchDefObject +class Person # @return [String] Person's name def name = @data["name"] @@ -252,6 +352,10 @@ def email = @data["email"] # @return [String] Company the person works for # @return [nil] if the company is not known, or if the person is an individual contributor def company = @data["company"] + + def initialize(data) + @data = data + end end # represents a JSON Schema compoisition, e.g.: @@ -264,7 +368,7 @@ def company = @data["company"] # class SchemaCondition # @param composition_hash [Hash] A possibly recursive hash of "allOf", "anyOf", "oneOf" - def initialize(composition_hash) + def initialize(composition_hash, arch_def) raise ArgumentError, "composition_hash is nil" if composition_hash.nil? unless is_a_condition?(composition_hash) @@ -272,6 +376,7 @@ def initialize(composition_hash) end @hsh = composition_hash + @arch_def = arch_def end def to_h = @hsh @@ -346,10 +451,14 @@ def is_a_condition?(hsh) def first_requirement(req = @hsh) case req when String - ExtensionRequirement.new(req, ">= 0") + ExtensionRequirement.new(req, arch_def: @arch_def) when Hash if req.key?("name") - ExtensionRequirement.new(req["name"], req["version"] || ">= 0") + if req["version"].nil? 
+ ExtensionRequirement.new(req["name"], arch_def: @arch_def) + else + ExtensionRequirement.new(req["name"], req["version"], arch_def: @arch_def) + end else first_requirement(req[req.keys[0]]) end @@ -361,12 +470,12 @@ def first_requirement(req = @hsh) end # combine all conds into one using AND - def self.all_of(*conds) + def self.all_of(*conds, arch_def:) cond = SchemaCondition.new({ "allOf" => conds - }) - - SchemaCondition.new(cond.minimize) + }, arch_def) + + SchemaCondition.new(cond.minimize, arch_def) end # @return [Object] Schema for this condition, with basic logic minimization @@ -404,14 +513,14 @@ def to_rb_helper(hsh) if hsh.key?("name") if hsh.key?("version") if hsh["version"].is_a?(String) - "(yield ExtensionRequirement.new('#{hsh["name"]}', '#{hsh["version"]}'))" + "(yield ExtensionRequirement.new('#{hsh["name"]}', '#{hsh["version"]}', arch_def: @arch_def))" elsif hsh["version"].is_a?(Array) - "(yield ExtensionRequirement.new('#{hsh["name"]}', #{hsh["version"].map { |v| "'#{v}'" }.join(', ')}))" + "(yield ExtensionRequirement.new('#{hsh["name"]}', #{hsh["version"].map { |v| "'#{v}'" }.join(', ')}, arch_def: @arch_def))" else raise "unexpected" end else - "(yield ExtensionRequirement.new('#{hsh["name"]}'))" + "(yield ExtensionRequirement.new('#{hsh["name"]}', arch_def: @arch_def))" end else key = hsh.keys[0] @@ -432,7 +541,7 @@ def to_rb_helper(hsh) end end else - "(yield ExtensionRequirement.new('#{hsh}'))" + "(yield ExtensionRequirement.new('#{hsh}', arch_def: @arch_def))" end end @@ -470,6 +579,16 @@ def satisfied_by?(&block) eval to_rb end + + def satisfying_ext_versions + list = [] + arch_def.extensions.each do |ext| + ext.versions.each do |ext_ver| + list << ext_ver if satisfied_by? { |ext_req| ext_req.satisfied_by?(ext_ver) } + end + end + list + end end class AlwaysTrueSchemaCondition @@ -480,4 +599,5 @@ def satisfied_by? = true def empty? = true def to_h = {} + def minimize = {} end diff --git a/lib/arch_obj_models/portfolio.rb b/lib/arch_obj_models/portfolio.rb index e378cdc01..1b30169a5 100644 --- a/lib/arch_obj_models/portfolio.rb +++ b/lib/arch_obj_models/portfolio.rb @@ -9,6 +9,8 @@ # # A variable name with a "_data" suffix indicates it is the raw hash data from the porfolio YAML file. +require "tmpdir" + require_relative "obj" require_relative "schema" @@ -22,13 +24,6 @@ class PortfolioClass < ArchDefObject # @return [ArchDef] The defining ArchDef attr_reader :arch_def - # @param data [Hash] The data from YAML - # @param arch_def [ArchDef] Architecture spec - def initialize(data, arch_def) - super(data) - @arch_def = arch_def - end - def introduction = @data["introduction"] def naming_scheme = @data["naming_scheme"] def description = @data["description"] @@ -49,13 +44,6 @@ class PortfolioInstance < ArchDefObject # @return [ArchDef] The defining ArchDef attr_reader :arch_def - # @param data [Hash] The data from YAML - # @param arch_def [ArchDef] Architecture spec - def initialize(data, arch_def) - super(data) - @arch_def = arch_def - end - def description = @data["description"] # @return [Gem::Version] Semantic version of the PortfolioInstance @@ -113,13 +101,15 @@ def in_scope_ext_reqs(desired_presence = nil) in_scope_ext_reqs = [] # Convert desired_present argument to ExtensionPresence object if not nil. - desired_presence_converted = - desired_presence.nil? ? nil : + desired_presence_converted = + desired_presence.nil? ? nil : desired_presence.is_a?(String) ? desired_presence : desired_presence.is_a?(ExtensionPresence) ? 
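# Sketch of how a SchemaCondition is consumed now that it carries an arch_def;
# the composition hash and extension names are illustrative:
#
#   cond = SchemaCondition.new({ "anyOf" => ["Zicsr", { "name" => "Zifencei", "version" => ">= 2.0" }] }, arch_def)
#   ext_ver = ExtensionVersion.new("Zicsr", "2.0.0", arch_def)
#   cond.satisfied_by? { |ext_req| ext_req.satisfied_by?(ext_ver) }
#     #=> true, assuming Zicsr@2.0.0 meets the requirement generated for "Zicsr"
#
# This is the same pattern defined_by? and excluded_by? use: build the condition
# once, then yield each ExtensionRequirement to a block that tests a concrete
# ExtensionVersion against it.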
desired_presence : ExtensionPresence.new(desired_presence) @data["extensions"]&.each do |ext_name, ext_data| + next if ext_name[0] == "$" + actual_presence = ext_data["presence"] # Could be a String or Hash raise "Missing extension presence for extension #{ext_name}" if actual_presence.nil? @@ -134,9 +124,16 @@ def in_scope_ext_reqs(desired_presence = nil) end if match - in_scope_ext_reqs << - ExtensionRequirement.new(ext_name, ext_data["version"], presence: actual_presence_obj, - note: ext_data["note"], req_id: "REQ-EXT-" + ext_name) + in_scope_ext_reqs << + if ext_data.key?("version") + ExtensionRequirement.new( + ext_name, ext_data["version"], arch_def: @arch_def, + presence: actual_presence_obj, note: ext_data["note"], req_id: "REQ-EXT-#{ext_name}") + else + ExtensionRequirement.new( + ext_name, arch_def: @arch_def, + presence: actual_presence_obj, note: ext_data["note"], req_id: "REQ-EXT-#{ext_name}") + end end end in_scope_ext_reqs @@ -184,23 +181,30 @@ def uses_optional_types? def to_arch_def return @generated_arch_def unless @generated_arch_def.nil? - arch_def_data = arch_def.unconfigured_data - - arch_def_data["mandatory_extensions"] = mandatory_ext_reqs.map do |ext_req| - { - "name" => ext_req.name, - "version" => ext_req.version_requirement.requirements.map { |r| "#{r[0]} #{r[1]}" } - } - end - arch_def_data["params"] = all_in_scope_ext_params.select(&:single_value?).map { |p| [p.name, p.value] }.to_h + # build up a config for the certificate + config_data = { + "$schema" => "config_schema.json", + "type" => "partially configured", + "kind" => "architecture configuration", + "name" => name, + "description" => "A partially configured architecture definition corresponding to the #{name} portfolio.", + "mandatory_extensions" => mandatory_ext_reqs.map do |ext_req| + { + "name" => ext_req.name, + "version" => ext_req.requirement_specs.map(&:to_s) + } + end, + "params" => all_in_scope_ext_params.select(&:single_value?).map { |p| [p.name, p.value] }.to_h + } # XXX Add list of prohibited_extensions - file = Tempfile.new("archdef") - file.write(YAML.safe_dump(arch_def_data, permitted_classes: [Date])) - file.flush - file.close - @generated_arch_def = ArchDef.new(name, Pathname.new(file.path)) + @generated_arch_def = + Dir.mktmpdir do |dir| + FileUtils.mkdir("#{dir}/#{name}") + File.write("#{dir}/#{name}/cfg.yaml", YAML.safe_dump(config_data, permitted_classes: [Date])) + @generated_arch_def = ArchDef.new(name, @arch_def.path, cfg_path: dir) + end end ################################### @@ -272,7 +276,9 @@ def all_in_scope_ext_params @all_in_scope_ext_params = [] - @data["extensions"].each do |ext_name, ext_data| + @data["extensions"].each do |ext_name, ext_data| + next if ext_name[0] == "$" + # Find Extension object from database ext = @arch_def.extension(ext_name) raise "Cannot find extension named #{ext_name}" if ext.nil? @@ -282,11 +288,12 @@ def all_in_scope_ext_params raise "There is no param '#{param_name}' in extension '#{ext_name}" if param.nil? next unless ext.versions.any? 
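# Sketch of the temporary cfg.yaml that to_arch_def above now writes and then
# re-loads as an ArchDef (portfolio name, extensions, and params illustrative):
#
#   $schema: config_schema.json
#   type: partially configured
#   kind: architecture configuration
#   name: MockPortfolio
#   description: A partially configured architecture definition corresponding to the MockPortfolio portfolio.
#   mandatory_extensions:
#     - name: I
#       version: ["~> 2.1"]
#   params:
#     XLEN: 32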
do |ext_ver| - Gem::Requirement.new(ext_data["version"]).satisfied_by?(ext_ver.version) && - param.defined_in_extension_version?(ext_ver.version) + ver_req = ext_data["version"] || ">= #{ext.min_version.version_spec}" + ExtensionRequirement.new(ext_name, ver_req, arch_def: @arch_def).satisfied_by?(ext_ver) && + param.defined_in_extension_version?(ext_ver) end - @all_in_scope_ext_params << + @all_in_scope_ext_params << InScopeExtensionParameter.new(param, param_data["schema"], param_data["note"]) end end @@ -311,17 +318,17 @@ def in_scope_ext_params(ext_req) # Loop through an extension's parameter constraints (hash) from the portfolio. # Note that "&" is the Ruby safe navigation operator (i.e., skip do loop if nil). ext_data["parameters"]&.each do |param_name, param_data| - # Find ExtensionParameter object from database - ext_param = ext.params.find { |p| p.name == param_name } - raise "There is no param '#{param_name}' in extension '#{ext_req.name}" if ext_param.nil? + # Find ExtensionParameter object from database + ext_param = ext.params.find { |p| p.name == param_name } + raise "There is no param '#{param_name}' in extension '#{ext_req.name}" if ext_param.nil? - next unless ext.versions.any? do |ext_ver| - Gem::Requirement.new(ext_data["version"]).satisfied_by?(ext_ver.version) && - ext_param.defined_in_extension_version?(ext_ver.version) - end + next unless ext.versions.any? do |ext_ver| + ext_req.satisfied_by?(ext_ver) && + ext_param.defined_in_extension_version?(ext_ver) + end - ext_params << - InScopeExtensionParameter.new(ext_param, param_data["schema"], param_data["note"]) + ext_params << + InScopeExtensionParameter.new(ext_param, param_data["schema"], param_data["note"]) end ext_params @@ -338,8 +345,8 @@ def all_out_of_scope_params next if all_in_scope_ext_params.any? { |c| c.param.name == param.name } next unless ext.versions.any? do |ext_ver| - Gem::Requirement.new(ext_req.version_requirement).satisfied_by?(ext_ver.version) && - param.defined_in_extension_version?(ext_ver.version) + ext_req.satisfied_by?(ext_ver) && + param.defined_in_extension_version?(ext_ver) end @all_out_of_scope_params << param @@ -350,7 +357,7 @@ def all_out_of_scope_params # @return [Array] Parameters that are out of scope for named extension. def out_of_scope_params(ext_name) - all_out_of_scope_params.select{|param| param.exts.any? {|ext| ext.name == ext_name} } + all_out_of_scope_params.select{ |param| param.exts.any? { |ext| ext.name == ext_name } } end # @return [Array] @@ -418,9 +425,9 @@ def all_in_scope_exts_without_param(param) # Tracks history of portfolio document. This is separate from its version since # a document may be revised several times before a new version is released. 
- class RevisionHistory < ArchDefObject + class RevisionHistory def initialize(data) - super(data) + @data = data end def revision = @data["revision"] @@ -442,9 +449,9 @@ def revision_history # ExtraNote Subclass # ###################### - class ExtraNote < ArchDefObject + class ExtraNote def initialize(data) - super(data) + @data = data @presence_obj = ExtensionPresence.new(@data["presence"]) end @@ -476,9 +483,9 @@ def extra_notes_for_presence(desired_presence_obj) # Recommendation Subclass # ########################### - class Recommendation < ArchDefObject + class Recommendation def initialize(data) - super(data) + @data = data end def text = @data["text"] @@ -493,4 +500,4 @@ def recommendations end @recommendations end -end \ No newline at end of file +end diff --git a/lib/arch_obj_models/profile.rb b/lib/arch_obj_models/profile.rb index 44d63f3b4..38b421f6a 100644 --- a/lib/arch_obj_models/profile.rb +++ b/lib/arch_obj_models/profile.rb @@ -7,12 +7,6 @@ # that each include an unprivileged profile (e.g., RVA20U64) and one more # privileged profiles (e.g., RVA20S64). class ProfileClass < PortfolioClass - # @param data [Hash] The data from YAML - # @param arch_def [ArchDef] Architecture spec - def initialize(data, arch_def) - super(data, arch_def) - end - # @return [String] Name of the class def marketing_name = @data["marketing_name"] @@ -68,13 +62,6 @@ def referenced_extensions # Note there is no Portfolio* base class for a ProfileRelease to inherit from since there is no # equivalent to a ProfileRelease in a Certificate so no potential for a shared base class. class ProfileRelease < ArchDefObject - # @param data [Hash] The data from YAML - # @param arch_def [ArchDef] Architecture spec - def initialize(data, arch_def) - super(data) - @arch_def = arch_def - end - def marketing_name = @data["marketing_name"] def introduction = @data["introduction"] def state = @data["state"] @@ -105,10 +92,8 @@ def profiles return @profiles unless @profiles.nil? @profiles = [] - @arch_def.profiles.each do |profile| - if profile.profile_release.name == name - @profiles << profile - end + @data["profiles"].each do |profile_ref| + @profiles << @arch_def.ref(profile_ref["$ref"]) end @profiles end @@ -130,9 +115,6 @@ def referenced_extensions # Representation of a specific profile in a profile release. class Profile < PortfolioInstance - def initialize(data, arch_def) - super(data, arch_def) - end # @return [String] The marketing name of the Profile def introduction = @data["introduction"] @@ -140,7 +122,7 @@ def marketing_name = @data["marketing_name"] # @return [ProfileRelease] The profile release this profile belongs to def profile_release - profile_release = @arch_def.profile_release(@data["release"]) + profile_release = @arch_def.ref(@data["release"]) raise "No profile release named '#{@data["release"]}'" if profile_release.nil? profile_release diff --git a/lib/config.rb b/lib/config.rb new file mode 100644 index 000000000..281964c3d --- /dev/null +++ b/lib/config.rb @@ -0,0 +1,169 @@ +# frozen_string_literal: true + +require "pathname" + +class Config + # @return [Hash] A hash mapping parameter name to value for any parameter that has been configured with a value. May be empty. 
+ attr_reader :param_values + + # use Config#create instead + private_class_method :new + + def self.freeze_data(obj) + if obj.is_a?(Hash) + obj.each do |k, v| + obj[k] = freeze_data(v) + end + elsif obj.is_a?(Array) + obj.each { |v| freeze_data(v) } + end + + obj.freeze + end + private_class_method :freeze_data + + # factory method to create a FullConfig, PartialConfig, or Unconfig based on the contents of cfg_filename + # + # @return [Config] A new Config + def self.create(cfg_filename) + cfg_file_path = Pathname.new(cfg_filename) + raise ArgumentError, "Cannot find #{cfg_filename}" unless cfg_file_path.exist? + + data = YAML.load(cfg_file_path.read, permitted_classes: [Date]) + + # now deep freeze the data + freeze_data(data) + + case data["type"] + when "fully configured" + FullConfig.send(:new, cfg_file_path, data) + when "partially configured" + PartialConfig.send(:new, cfg_file_path, data) + when "unconfigured" + Unconfig.send(:new, cfg_file_path, data) + else + raise "Unexpected type in config" + end + end + + def initialize(cfg_file_path, data) + @cfg_file_path = cfg_file_path + @data = data + end + + def name = @data["name"] + + def fully_configured? = @data["type"] == "fully configured" + def partially_configured? = @data["type"] == "partially configured" + def unconfigured? = @data["type"] == "unconfigured" + def configured? = @data["type"] != "unconfigured" + def type = @data["type"] +end + +class Unconfig < Config + attr_reader :param_values + + def initialize(cfg_file_path, data) + super(cfg_file_path, data) + + @param_values = {}.freeze + end + + def mxlen = nil + + def implemented_extensions = raise "implemented_extensions is only availabe for a FullConfig" + def mandatory_extensions = raise "mandatory_extensions is only availabe for a PartialConfig" +end + +class PartialConfig < Config + attr_reader :param_values, :mxlen + + def initialize(cfg_file_path, data) + super(cfg_file_path, data) + + @param_values = @data.key?("params") ? @data["params"] : [].freeze + + @mxlen = @data.dig("params", "XLEN") + raise "Must set XLEN for a configured config" if @mxlen.nil? + + @mxlen.freeze + end + + def implemented_extensions = raise "implemented_extensions is only availabe for a FullConfig" + + # @return [Array] List of all extensions that must be implemented, as specified in the config file + # Implied/required extensions are *not* transitively included (though they are from ArchDef#mandatory_extensions) + def mandatory_extensions(arch_def) + @mandatory_extensions ||= + if @data.key?("mandatory_extensions") + @data["mandatory_extensions"].map do |e| + ext = arch_def.extension(e["name"]) + raise "Cannot find extension #{e['name']} in the architecture definition" if ext.nil? + + req_spec = e["version"].is_a?(Array) ? e["version"] : [e["version"]] + ExtensionRequirement.new(e["name"], *req_spec, presence: "mandatory", arch_def:) + end + else + [] + end + end + + # @return [Array] List of all extensions that are prohibited. + # This only includes extensions explicitly prohibited by the config file. + def prohibited_extensions(arch_def) + return @prohibited_extensions unless @prohibited_extensions.nil? 
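# Rough usage sketch for the new Config factory; the path is illustrative:
#
#   cfg = Config.create("cfg/example/cfg.yaml")
#   case cfg
#   when FullConfig    then cfg.implemented_extensions(arch_def)  # explicit list from the file
#   when PartialConfig then cfg.mandatory_extensions(arch_def)    # ExtensionRequirements (non-transitive)
#   when Unconfig      then cfg.param_values                      # always {}
#   end
#   cfg.mxlen   #=> params.XLEN for a (partially) configured config, nil for an Unconfig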
+ + @prohibited_extensions = [] + if @data.key?("prohibited_extensions") + @data["prohibited_extensions"].each do |e| + @prohibited_extensions << + if e.is_a?(String) + ExtensionRequirement.new(e, nil, arch_def:) + else + ExtensionRequirement.new(e["name"], e["version"], presence: "prohibited", arch_def:) + end + end + end + @prohibited_extensions + end + + def prohibited_ext?(ext_name, arch_def) = prohibited_extensions(arch_def).any? { |e| e.name == ext_name.to_s } + + def ext?(ext_name, arch_def) = mandatory_extensions(arch_def).any? { |e| e.name == ext_name.to_s } +end + + +class FullConfig < Config + attr_reader :param_values, :mxlen + + def initialize(cfg_file_path, data) + super(cfg_file_path, data) + + @param_values = @data["params"] + + @mxlen = @data.dig("params", "XLEN").freeze + raise "Must set XLEN for a configured config" if @mxlen.nil? + end + + # @return [Array] List of all extensions known to be implemented in this architecture + def implemented_extensions(arch_def) + return @implemented_extensions unless @implemented_extensions.nil? + + @implemented_extensions = [] + if @data.key?("implemented_extensions") + @data["implemented_extensions"].each do |e| + if e.is_a?(Array) + @implemented_extensions << ExtensionVersion.new(e[0], e[1], arch_def) + else + @implemented_extensions << ExtensionVersion.new(e["name"], e["version"], arch_def) + end + end + end + @implemented_extensions + end + + def mandatory_extensions = raise "mandatory_extensions is only availabe for a PartialConfig" + + # def prohibited_ext?(ext_name, arch_def) = !ext?(ext_name, arch_def) + # def ext?(ext_name, arch_def) = implemented_extensions(arch_def).any? { |e| e.name == ext_name.to_s } +end diff --git a/lib/idl.rb b/lib/idl.rb index 47250ddb2..68bcba499 100644 --- a/lib/idl.rb +++ b/lib/idl.rb @@ -35,10 +35,8 @@ def instantiate_node(node_type, *args) module Idl # the Idl compiler class Compiler - # @param arch_def [ArchDef] Architecture defintion, the context of the compilation - def initialize(arch_def) + def initialize @parser = IdlParser.new - @arch_def = arch_def end def compile_file(path) diff --git a/lib/idl/ast.rb b/lib/idl/ast.rb index 5915b88a9..c5b161856 100644 --- a/lib/idl/ast.rb +++ b/lib/idl/ast.rb @@ -48,6 +48,7 @@ class AstNode Bits32Type = Type.new(:bits, width: 32).freeze Bits64Type = Type.new(:bits, width: 64).freeze BitsUnknownType = Type.new(:bits, width: :unknown).freeze + ConstBitsUnknownType = Type.new(:bits, width: :unknown, qualifiers: [:const]).freeze ConstBoolType = Type.new(:boolean, qualifiers: [:const]).freeze BoolType = Type.new(:boolean).freeze VoidType = Type.new(:void).freeze @@ -1502,20 +1503,21 @@ def type_check(symtab) type_error "Array index must be integral" unless index.type(symtab).integral? - if var.type(symtab).kind == :array + var_type = var.type(symtab) + if var_type.kind == :array value_result = value_try do index_value = index.value(symtab) - if var.type(symtab).width != :unknown - type_error "Array index out of range" if index_value >= var.type(symtab).width + if var_type.width != :unknown + type_error "Array index out of range" if index_value >= var_type.width end end # Ok, doesn't need to be known - elsif var.type(symtab).integral? - if var.type(symtab).kind == :bits + elsif var_type.integral? 
+ if var_type.kind == :bits value_result = value_try do index_value = index.value(symtab) - if index_value >= var.type(symtab).width - type_error "Bits element index (#{index_value}) out of range (max #{var.type(symtab).width - 1}) in access '#{text_value}'" + if (var_type.width != :unknown) && (index_value >= var_type.width) + type_error "Bits element index (#{index_value}) out of range (max #{var_type.width - 1}) in access '#{text_value}'" end end # OK, doesn need to be known end @@ -1526,9 +1528,10 @@ def type_check(symtab) end def type(symtab) - if var.type(symtab).kind == :array - var.type(symtab).sub_type - elsif var.type(symtab).integral? + var_type = var.type(symtab) + if var_type.kind == :array + var_type.sub_type + elsif var_type.integral? Bits1Type else internal_error "Bad ary element access" @@ -1583,8 +1586,9 @@ def type_check(symtab) msb_value = msb.value(symtab) lsb_value = lsb.value(symtab) - if var.type(symtab).kind == :bits && msb_value >= var.type(symtab).width - type_error "Range too large for bits (msb = #{msb_value}, range size = #{var.type(symtab).width})" + var_type = var.type(symtab) + if var_type.kind == :bits && var_type.width != :unknown && msb_value >= var_type.width + type_error "Range too large for bits (msb = #{msb_value}, range size = #{var_type.width})" end range_size = msb_value - lsb_value + 1 @@ -2405,7 +2409,6 @@ def type_check(symtab) decl_type = lhs_type(symtab) - if decl_type.const? # this is a constant; ensure we are assigning a constant value value_result = value_try do @@ -2413,7 +2416,7 @@ def type_check(symtab) end value_else(value_result) do unless rhs.type(symtab).const? - type_error "Declaring constant with a non-constant value (#{e})" + type_error "Declaring constant with a non-constant value (#{rhs.text_value})" end symtab.add(lhs.text_value, Var.new(lhs.text_value, decl_type.clone)) end @@ -2951,14 +2954,19 @@ def value(symtab) elsif op == "|" # if one side is all ones, we don't need to know the other side + rhs_type = rhs.type(symtab) + value_error("Unknown width") if rhs_type.width == :unknown + lhs_type = lhs.type(symtab) + value_error("unknown width") if lhs_type.width == :unknown + value_result = value_try do - rhs_mask = ((1 << rhs.type(symtab).width) - 1) - return rhs_mask if (rhs.value(symtab) == rhs_mask) && (lhs.type(symtab).width <= rhs.type(symtab).width) + rhs_mask = ((1 << rhs_type.width) - 1) + return rhs_mask if (rhs.value(symtab) == rhs_mask) && (lhs_type.width <= rhs_type.width) end - # ok, trye rhs + # ok, try rhs - lhs_mask = ((1 << lhs.type(symtab).width) - 1) - return lhs_mask if (lhs.value(symtab) == lhs_mask) && (rhs.type(symtab).width <= lhs.type(symtab).width) + lhs_mask = ((1 << lhs_type.width) - 1) + return lhs_mask if (lhs.value(symtab) == lhs_mask) && (rhs_type.width <= lhs_type.width) lhs.value(symtab) | rhs.value(symtab) @@ -2992,6 +3000,10 @@ def value(symtab) v_trunc = if !lhs.type(symtab).const? || !rhs.type(symtab).const? 
# when both sides are constant, the value is not truncated + width = type(symtab).width + if width == :unknown + value_error("unknown width in op that possibly truncates") + end v & ((1 << type(symtab).width) - 1) else v @@ -3098,15 +3110,21 @@ def type_check(symtab) expressions.each do |exp| exp.type_check(symtab) - type_error "Concatenation only supports Bits<> types" unless exp.type(symtab).kind == :bits + e_type = exp.type(symtab) + type_error "Concatenation only supports Bits<> types" unless e_type.kind == :bits - internal_error "Negative width for element #{exp.text_value}" if exp.type(symtab).width <= 0 + internal_error "Negative width for element #{exp.text_value}" if (e_type.width != :unknown) && (e_type.width <= 0) end end # @!macro type def type(symtab) - total_width = expressions.reduce(0) { |sum, exp| sum + exp.type(symtab).width } + total_width = expressions.reduce(0) do |sum, exp| + e_type = exp.type(symtab) + return BitsUnknownType if e_type.width == :unknown + + sum + e_type.width + end Type.new(:bits, width: total_width) end @@ -3562,12 +3580,16 @@ def value(symtab) else internal_error "Unhandled unary op #{op}" end - if type(symtab).integral? - val_trunc = val & ((1 << type(symtab).width) - 1) - if type(symtab).signed? && ((((val_trunc >> (type(symtab).width - 1))) & 1) == 1) + t = type(symtab) + if t.integral? + if t.width == :unknown + value_error("Unknown width for truncation") + end + val_trunc = val & ((1 << t.width) - 1) + if t.signed? && ((((val_trunc >> (t.width - 1))) & 1) == 1) # need to make this negative! # take the twos compliment - val_trunc = -((1 << type(symtab).width) - val_trunc) + val_trunc = -((1 << t.width) - val_trunc) end end @@ -4298,7 +4320,15 @@ class IntLiteralAst < AstNode def initialize(input, interval) super(input, interval, EMPTY_ARRAY) - @types = [nil, nil] + end + + def freeze_tree(global_symtab) + return if frozen? + + # initialize the cached objects + type(global_symtab) + value(global_symtab) + freeze end # @!macro type_check @@ -4309,8 +4339,7 @@ def type_check(symtab) value_text = ::Regexp.last_match(6) if width.nil? || width == "XLEN" - width = symtab.mxlen - memoize = false + width = symtab.mxlen.nil? ? 32 : symtab.mxlen # 32 is the min width, which is what we care about here end # ensure we actually have enough bits to represent the value @@ -4320,50 +4349,35 @@ def type_check(symtab) # @!macro type def type(symtab) - return BitsUnknownType if symtab.mxlen.nil? - - cache_idx = symtab.mxlen >> 6 # 0 = 32, 1 = 64 - return @types[cache_idx] unless @types[cache_idx].nil? + return @type unless @type.nil? case text_value.delete("_") when /^((XLEN)|([0-9]+))?'(s?)([bodh]?)(.*)$/ # verilog-style literal - width = ::Regexp.last_match(1) signed = ::Regexp.last_match(4) - - memoize = true - if width.nil? || width == "XLEN" - width = symtab.mxlen - memoize = false - end + width = width(symtab) qualifiers = signed == "s" ? [:signed, :const] : [:const] - t = Type.new(:bits, width: width.to_i, qualifiers:) - @types[cache_idx] = t if memoize - t + @type = Type.new(:bits, width: width, qualifiers:) when /^0([bdx]?)([0-9a-fA-F]*)(s?)$/ # C++-style literal signed = ::Regexp.last_match(3) qualifiers = signed == "s" ? [:signed, :const] : [:const] - type = Type.new(:bits, width: width(symtab), qualifiers:) - @types[cache_idx] = type - type + @type = Type.new(:bits, width: width(symtab), qualifiers:) when /^([0-9]*)(s?)$/ # basic decimal signed = ::Regexp.last_match(2) qualifiers = signed == "s" ? 
[:signed, :const] : [:const] - type = Type.new(:bits, width: width(symtab), qualifiers:) - @types[cache_idx] = type - type + @type = Type.new(:bits, width: width(symtab), qualifiers:) else internal_error "Unhandled int value" end end def width(symtab) - # return @width unless @width.nil? + return @width unless @width.nil? text_value_no_underscores = text_value.delete("_") @@ -4371,29 +4385,26 @@ def width(symtab) when /^((XLEN)|([0-9]+))?'(s?)([bodh]?)(.*)$/ # verilog-style literal width = ::Regexp.last_match(1) - memoize = true if width.nil? || width == "XLEN" - width = symtab.mxlen - memoize = false + width = symtab.mxlen.nil? ? :unknown : symtab.mxlen + else + width = width.to_i end - # @width = width if memoize - width + @width = width when /^0([bdx]?)([0-9a-fA-F]*)(s?)$/ signed = ::Regexp.last_match(3) width = signed == "s" ? value(symtab).bit_length + 1 : value(symtab).bit_length width = 1 if width.zero? # happens when the literal is '0' - # @width = width - width + @width = width when /^([0-9]*)(s?)$/ signed = ::Regexp.last_match(3) width = signed == "s" ? value(symtab).bit_length + 1 : value(symtab).bit_length width = 1 if width.zero? # happens when the literal is '0' - # @width = width - width + @width = width else internal_error "No match on int literal" end @@ -4401,65 +4412,64 @@ def width(symtab) # @!macro value def value(symtab) - # return @value unless @value.nil? + return @value unless @value.nil? if text_value.delete("_") =~ /^((XLEN)|([0-9]+))?'(s?)([bodh]?)(.*)$/ # verilog-style literal - width = ::Regexp.last_match(1) signed = ::Regexp.last_match(4) - - memoize = true - if width.nil? || width == "XLEN" - width = symtab.mxlen - memoize = false - end + width = width(symtab) v = - if !signed.empty? && ((unsigned_value >> (width.to_i - 1)) == 1) - -(2**width.to_i - unsigned_value) + if width == :unknown + if !signed.empty? + if unsigned_value > 0x7fff_ffff + value_error("Don't know if value will be negative") + else + if unsigned_value > 0xffff_ffff + value_error("Don't know if value will fit in literal") + end + unsigned_value + end + else + if unsigned_value > 0xffff_ffff + value_error("Don't know if value will fit in literal") + end + unsigned_value + end else - unsigned_value + if unsigned_value.bit_length > width + value_error("Value does not fit in literal") + end + if !signed.empty? && ((unsigned_value >> (width - 1)) == 1) + if unsigned_value.bit_length > (width - 1) + value_error("Value does not fit in literal") + end + -(2**width.to_i - unsigned_value) + else + unsigned_value + end end - # @value = v if memoize - v + @value = v else - # @value = unsigned_value - unsigned_value + @value = unsigned_value end end # @return [Integer] the unsigned value of this literal (i.e., treating it as unsigned even if the signed specifier is present) def unsigned_value - # return @unsigned_value unless @unsigned_value.nil? + return @unsigned_value unless @unsigned_value.nil? - case text_value.delete("_") - when /^((XLEN)|([0-9]+))?'(s?)([bodh]?)(.*)$/ - # verilog-style literal - radix_id = ::Regexp.last_match(5) - value = ::Regexp.last_match(6) + @unsigned_value = + case text_value.delete("_") + when /^((XLEN)|([0-9]+))?'(s?)([bodh]?)(.*)$/ + # verilog-style literal + radix_id = ::Regexp.last_match(5) + value = ::Regexp.last_match(6) - radix_id = "d" if radix_id.empty? + radix_id = "d" if radix_id.empty? 
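+        # Radix letters for Verilog-style literals: b (binary), o (octal),
+        # d (decimal, the default), and h (hex). A couple of hedged examples:
+        #   8'hA5   -> 165
+        #   4'b1010 -> 10
+        #   XLEN'd0 -> 0 (its width resolves to :unknown when mxlen is not configured)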
- case radix_id - when "b" - value.to_i(2) - when "o" - value.to_i(8) - when "d" - value.to_i(10) - when "h" - value.to_i(16) - end - when /^0([bdx]?)([0-9a-fA-F]*)(s?)$/ - # C++-style literal - radix_id = ::Regexp.last_match(1) - value = ::Regexp.last_match(2) - - radix_id = "o" if radix_id.empty? - - # @unsigned_value = case radix_id when "b" value.to_i(2) @@ -4467,19 +4477,37 @@ def unsigned_value value.to_i(8) when "d" value.to_i(10) - when "x" + when "h" value.to_i(16) end + when /^0([bdx]?)([0-9a-fA-F]*)(s?)$/ + # C++-style literal + radix_id = ::Regexp.last_match(1) + value = ::Regexp.last_match(2) + + radix_id = "o" if radix_id.empty? + + # @unsigned_value = + case radix_id + when "b" + value.to_i(2) + when "o" + value.to_i(8) + when "d" + value.to_i(10) + when "x" + value.to_i(16) + end - when /^([0-9]*)(s?)$/ - # basic decimal - value = ::Regexp.last_match(1) + when /^([0-9]*)(s?)$/ + # basic decimal + value = ::Regexp.last_match(1) - # @unsigned_value = value.to_i(10) - value.to_i(10) - else - internal_error "Unhandled int value '#{text_value}'" - end + # @unsigned_value = value.to_i(10) + value.to_i(10) + else + internal_error "Unhandled int value '#{text_value}'" + end end # @!macro to_idl @@ -5925,7 +5953,7 @@ def value(symtab) cd = csr_def(symtab) value_error "CSR number not knowable" if cd.nil? if symtab.archdef.fully_configured? - value_error "CSR is not implemented" unless symtab.archdef.implemented_csrs.any? { |icsr| icsr.name == cd.name } + value_error "CSR is not implemented" unless symtab.archdef.transitive_implemented_csrs.any? { |icsr| icsr.name == cd.name } else value_error "CSR is not defined" unless symtab.archdef.csrs.any? { |icsr| icsr.name == cd.name } end @@ -5960,7 +5988,8 @@ def type_check(symtab) csr.type_check(symtab) expression.type_check(symtab) - return if expression.type(symtab).kind == :bits && expression.type(symtab).width == symtab.mxlen + e_type = expression.type(symtab) + return if e_type.kind == :bits && ((e_type.width == :unknown || symtab.mxlen.nil?) || (e_type.width == symtab.mxlen)) type_error "CSR value must be an XReg" end @@ -6029,7 +6058,7 @@ def type(symtab) if csr_known?(symtab) Type.new(:bits, width: archdef.csr(csr.csr_name(symtab)).length(archdef)) else - Type.new(:bits, width: symtab.mxlen) + Type.new(:bits, width: symtab.mxlen.nil? ? 
:unknown : symtab.mxlen) end when "address" Type.new(:bits, width: 12) diff --git a/lib/idl/passes/prune.rb b/lib/idl/passes/prune.rb index d0065842f..165a0190b 100644 --- a/lib/idl/passes/prune.rb +++ b/lib/idl/passes/prune.rb @@ -220,16 +220,19 @@ def prune(symtab) BinaryExpressionAst.new(input, interval, lhs.prune(symtab), @op, rhs.prune(symtab)) end elsif op == "|" + rhs_type = rhs.type(symtab) + lhs_type = lhs.type(symtab) + if lhs_value == 0 # rhs idenntity rhs.prune(symtab) - elsif lhs_value == ((1 << rhs.type(symtab).width) - 1) + elsif rhs_type.width != :unknown && lhs_value == ((1 << rhs.type(symtab).width) - 1) # ~0 | anything == ~0 create_literal(lhs_value) elsif rhs_value == 0 # lhs identity lhs.prune(symtab) - elsif rhs_value == (1 << lhs.type(symtab).width - 1) + elsif lhs_type.width != :unknown && rhs_value == (1 << lhs.type(symtab).width - 1) # anything | ~0 == ~0 create_literal(rhs_value) else diff --git a/lib/idl/symbol_table.rb b/lib/idl/symbol_table.rb index 81cb4ae46..cc3b31830 100644 --- a/lib/idl/symbol_table.rb +++ b/lib/idl/symbol_table.rb @@ -14,7 +14,7 @@ def initialize(name, type, value = nil, decode_var: false, template_index: nil, @type = type @type.freeze @value = value - raise 'unexpected' unless decode_var.is_a?(TrueClass) || decode_var.is_a?(FalseClass) + raise "unexpected" unless decode_var.is_a?(TrueClass) || decode_var.is_a?(FalseClass) @decode_var = decode_var @template_index = template_index @@ -75,7 +75,7 @@ def value=(new_value) # scoped symbol table holding known symbols at a current point in parsing class SymbolTable - attr_reader :archdef + def archdef = @arch_def # @return [Integer] 32 or 64, the XLEN in M-mode attr_reader :mxlen @@ -86,12 +86,13 @@ class DuplicateSymError < StandardError def hash return @frozen_hash unless @frozen_hash.nil? - [@scopes.hash, @archdef.hash].hash + [@scopes.hash, @arch_def.hash].hash end def initialize(arch_def) - @archdef = arch_def - @mxlen = arch_def.mxlen + raise if arch_def.nil? + @arch_def = arch_def + @mxlen = arch_def.unconfigured? ? nil : arch_def.mxlen @callstack = [nil] @scopes = [{ "X" => Var.new( @@ -128,15 +129,11 @@ def initialize(arch_def) end end end + # now add all parameters, even those not implemented arch_def.params_without_value.each do |param| if param.exts.size == 1 - if param.name == "XLEN" - # special case: we actually do know XLEN - add!(param.name, Var.new(param.name, param.idl_type.clone.make_const, @mxlen)) - else - add!(param.name, Var.new(param.name, param.idl_type.clone.make_const)) - end + add!(param.name, Var.new(param.name, param.idl_type.clone.make_const)) else # could already be present... 
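+          # (a parameter shared by multiple extensions is looked up first so it is
+          # only registered once)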
existing_sym = get(param.name) @@ -149,32 +146,6 @@ def initialize(arch_def) end end end - - # add the builtin extensions - # add!( - # "ExtensionName", - # EnumerationType.new( - # "ExtensionName", - # arch_def.extensions.map(&:name), - # Array.new(arch_def.extensions.size) { |i| i + 1 } - # ) - # ) - # add!( - # "ExceptionCode", - # EnumerationType.new( - # "ExceptionCode", - # arch_def.exception_codes.map(&:var), - # arch_def.exception_codes.map(&:num) - # ) - # ) - # add!( - # "InterruptCode", - # EnumerationType.new( - # "InterruptCode", - # arch_def.interrupt_codes.map(&:var), - # arch_def.interrupt_codes.map(&:num) - # ) - # ) end # do a deep freeze to protect the sym table and all its entries from modification @@ -186,16 +157,16 @@ def deep_freeze @scopes.freeze # set frozen_hash so that we can quickly compare symtabs - @frozen_hash = [@scopes.hash, @archdef.hash].hash + @frozen_hash = [@scopes.hash, @arch_def.hash].hash # set up the global clone that be used as a mutable table @global_clone_pool = [] - 5.times do + 5.times do copy = SymbolTable.allocate copy.instance_variable_set(:@scopes, [@scopes[0]]) copy.instance_variable_set(:@callstack, [@callstack[0]]) - copy.instance_variable_set(:@archdef, @archdef) + copy.instance_variable_set(:@arch_def, @arch_def) copy.instance_variable_set(:@mxlen, @mxlen) copy.instance_variable_set(:@global_clone_pool, @global_clone_pool) copy.instance_variable_set(:@in_use, false) @@ -208,7 +179,7 @@ def deep_freeze # @return [String] inspection string def inspect - "SymbolTable[#{@archdef.name}]#{frozen? ? ' (frozen)' : ''}" + "SymbolTable[#{@arch_def.name}]#{frozen? ? ' (frozen)' : ''}" end # pushes a new scope @@ -371,7 +342,7 @@ def global_clone copy = SymbolTable.allocate copy.instance_variable_set(:@scopes, [@scopes[0]]) copy.instance_variable_set(:@callstack, [@callstack[0]]) - copy.instance_variable_set(:@archdef, @archdef) + copy.instance_variable_set(:@arch_def, @arch_def) copy.instance_variable_set(:@mxlen, @mxlen) copy.instance_variable_set(:@global_clone_pool, @global_clone_pool) copy.instance_variable_set(:@in_use, false) diff --git a/lib/idl/tests/helpers.rb b/lib/idl/tests/helpers.rb index ca26eb4ab..85123e036 100644 --- a/lib/idl/tests/helpers.rb +++ b/lib/idl/tests/helpers.rb @@ -36,7 +36,7 @@ def name = "mock" module TestMixin def setup @archdef = MockArchDef.new - @symtab = Idl::SymbolTable.new(@archdef, 32) - @compiler = Idl::Compiler.new(@archdef) + @symtab = Idl::SymbolTable.new(@archdef) + @compiler = Idl::Compiler.new end -end \ No newline at end of file +end diff --git a/lib/resolver.rb b/lib/resolver.rb deleted file mode 100644 index 2427ff5b9..000000000 --- a/lib/resolver.rb +++ /dev/null @@ -1,20 +0,0 @@ -# given an architecture folder, resolves inheritance and expands some fields - -require "pathname" - -class Resolver - def initialize(arch_folder) - @dir = Pathname.new(arch_folder).realpath - end - - def resolve_all(output_folder) - Dir.glob(@dir / "**" / "*.yaml") do |f| - resolve(f, "#{output_folder}/#{f.gsub("#{@dir.to_s}/", "")}") - end - end - - def resolve(input_file, output_file) - obj = YamlLoader.load(input_file, permitted_classes: [Date]) - File.write(output_file, YAML::dump(obj)) - end -end \ No newline at end of file diff --git a/lib/specification.rb b/lib/specification.rb new file mode 100644 index 000000000..184030b4a --- /dev/null +++ b/lib/specification.rb @@ -0,0 +1,278 @@ +# frozen_string_literal: true + +require "active_support/inflector/methods" + +require "json" +require "json_schemer" +require 
"pathname" +require "yaml" + +require_relative "idl" + +require_relative "arch_obj_models/certificate" +require_relative "arch_obj_models/csr" +require_relative "arch_obj_models/csr_field" +require_relative "arch_obj_models/exception_code" +require_relative "arch_obj_models/extension" +require_relative "arch_obj_models/instruction" +require_relative "arch_obj_models/manual" +require_relative "arch_obj_models/portfolio" +require_relative "arch_obj_models/profile" + +# represents an entire RISC-V Specification. +# +# Could be either the standard spec (defined by RISC-V International) +# of a custom spec (defined as an overlay in cfgs/) +class Specification + # @return [Pathname] Path to the directory with the standard YAML files + attr_reader :path + + def initialize(arch_dir) + @arch_dir = Pathname.new(arch_dir) + raise "Arch directory not found: #{arch_dir}" unless @arch_dir.exist? + + @arch_dir = @arch_dir.realpath + @path = @arch_dir # alias + @objects ||= {} + @object_hashes ||= {} + end + + def validate(show_progress: true) + progressbar = ProgressBar.create(total: objs.size) if show_progress + + objs.each do |obj| + progressbar.increment if show_progress + obj.validate + end + end + + # @!macro [attach] generate_obj_methods + # @method $1s + # @return [Array<$3>] List of all $1s defined in the standard + # + # @method $1_hash + # @return [Hash] Hash of all $1s + # + # @method $1 + # @param name [String] The $1 name + # @return [$3] The $1 + # @return [nil] if there is no $1 named +name+ + def self.generate_obj_methods(fn_name, arch_dir, obj_class) + plural_fn = ActiveSupport::Inflector.pluralize(fn_name) + + define_method(plural_fn) do + return @objects[arch_dir] unless @objects[arch_dir].nil? + + @objects[arch_dir] = [] + @object_hashes[arch_dir] = {} + Dir.glob(@arch_dir / arch_dir / "**" / "*.yaml") do |obj_path| + obj_yaml = YAML.load_file(obj_path, permitted_classes: [Date]) + @objects[arch_dir] << obj_class.new(obj_yaml, Pathname.new(obj_path).realpath, arch: self) + @object_hashes[arch_dir][@objects[arch_dir].last.name] = @objects[arch_dir].last + end + @objects[arch_dir] + end + + define_method("#{fn_name}_hash") do + return @object_hashes[arch_dir] unless @object_hashes[arch_dir].nil? + + send(plural_fn) # create the hash + + @object_hashes[arch_dir] + end + + define_method(fn_name) do |name| + return @object_hashes[arch_dir][name] unless @object_hashes[arch_dir].nil? + + send(plural_fn) # create the hash + + @object_hashes[arch_dir][name] + end + end + + OBJS = [ + { + fn_name: "extension", + arch_dir: "ext", + klass: Extension + }, + { + fn_name: "instruction", + arch_dir: "inst", + klass: Instruction + }, + { + fn_name: "csr", + arch_dir: "csr", + klass: Csr + }, + { + fn_name: "cert_class", + arch_dir: "certificate_class", + klass: CertClass + }, + { + fn_name: "cert_model", + arch_dir: "certificate_model", + klass: CertModel + }, + { + fn_name: "manual", + arch_dir: "manual", + klass: Manual + }, + { + fn_name: "manual_version", + arch_dir: "manual_version", + klass: ManualVersion + }, + { + fn_name: "profile_release", + arch_dir: "profile_release", + klass: ProfileRelease + }, + { + fn_name: "profile_class", + arch_dir: "profile_class", + klass: ProfileClass + }, + { + fn_name: "profile", + arch_dir: "profile", + klass: Profile + } + ].freeze + + OBJS.each do |obj_info| + generate_obj_methods(obj_info[:fn_name], obj_info[:arch_dir], obj_info[:klass]) + end + + # @return [Array] All known objects + def objs + return @objs unless @objs.nil? 
+
+    @objs = []
+    OBJS.each do |obj_info|
+      @objs.concat(send(ActiveSupport::Inflector.pluralize(obj_info[:fn_name])))
+    end
+    @objs.freeze
+  end
+
+  # @return [Array] Alphabetical list of all parameters defined in the architecture
+  def params
+    return @params unless @params.nil?
+
+    @params = extensions.map(&:params).flatten.uniq(&:name).sort_by!(&:name)
+  end
+
+  # @return [Hash] Hash of all extension parameters defined in the architecture
+  def params_hash
+    return @params_hash unless @params_hash.nil?
+
+    @params_hash = {}
+    params.each do |param|
+      @params_hash[param.name] = param
+    end
+    @params_hash
+  end
+
+  # @return [ExtensionParameter] Parameter named +name+
+  # @return [nil] if there is no parameter named +name+
+  def param(name)
+    params_hash[name]
+  end
+
+  # @return [Array] All exception codes defined by the spec
+  def exception_codes
+    return @exception_codes unless @exception_codes.nil?
+
+    @exception_codes =
+      extensions.reduce([]) do |list, ext_version|
+        ecodes = extension(ext_version.name)["exception_codes"]
+        next list if ecodes.nil?
+
+        ecodes.each do |ecode|
+          # double check that all the codes are unique
+          raise "Duplicate exception code" if list.any? { |e| e.num == ecode["num"] || e.name == ecode["name"] || e.var == ecode["var"] }
+
+          list << ExceptionCode.new(ecode["name"], ecode["var"], ecode["num"], self)
+        end
+        list
+      end
+  end
+
+  # @return [Array] All interrupt codes defined by extensions
+  def interrupt_codes
+    return @interrupt_codes unless @interrupt_codes.nil?
+
+    @interrupt_codes =
+      extensions.reduce([]) do |list, ext_version|
+        icodes = extension(ext_version.name)["interrupt_codes"]
+        next list if icodes.nil?
+
+        icodes.each do |icode|
+          # double check that all the codes are unique
+          if list.any? { |i| i.num == icode["num"] || i.name == icode["name"] || i.var == icode["var"] }
+            raise "Duplicate interrupt code"
+          end
+
+          list << InterruptCode.new(icode["name"], icode["var"], icode["num"], self)
+        end
+        list
+      end
+  end
+
+  # given a `$ref` target, return the Ruby object
+  #
+  # @param uri [String] JSON Reference pointer
+  # @return [Object] The pointed-to object
+  def ref(uri)
+    raise ArgumentError, "JSON Reference must contain one '#'" unless uri.count("#") == 1
+
+    file_path, obj_path = uri.split("#")
+    obj =
+      case file_path
+      when /^certificate_class.*/
+        cert_class_name = File.basename(file_path, ".yaml")
+        cert_class(cert_class_name)
+      when /^certificate_model.*/
+        cert_model_name = File.basename(file_path, ".yaml")
+        cert_model(cert_model_name)
+      when /^csr.*/
+        csr_name = File.basename(file_path, ".yaml")
+        csr(csr_name)
+      when /^ext.*/
+        ext_name = File.basename(file_path, ".yaml")
+        extension(ext_name)
+      when /^inst.*/
+        inst_name = File.basename(file_path, ".yaml")
+        instruction(inst_name)
+      when /^manual.*/
+        manual_name = File.basename(file_path, ".yaml")
+        manual(manual_name)
+      when /^manual_version.*/
+        manual_name = File.basename(file_path, ".yaml")
+        manual_version(manual_name)
+      when /^profile_class.*/
+        profile_class_name = File.basename(file_path, ".yaml")
+        profile_class(profile_class_name)
+      when /^profile_release.*/
+        profile_release_name = File.basename(file_path, ".yaml")
+        profile_release(profile_release_name)
+      else
+        raise "Unhandled ref object: #{file_path}"
+      end
+
+    if obj_path.nil?
+      obj
+    else
+      parts = obj_path.split("/")
+      parts.each do |part|
+        raise "Error in $ref.
There is no method '#{part}' for a #{obj.class.name}" unless obj.respond_to?(part.to_sym) + + obj = obj.send(part) + end + obj + end + end +end diff --git a/lib/validate.rb b/lib/validate.rb deleted file mode 100644 index c2215b433..000000000 --- a/lib/validate.rb +++ /dev/null @@ -1,278 +0,0 @@ -# frozen_string_literal: true - -require_relative "yaml_loader" - -require "date" -require "json" -require "json_schemer" -require "pathname" -require "singleton" -require "yaml" - -$root = Pathname.new(__FILE__).dirname.dirname.realpath if $root.nil? - -# class used to validate schmeas and objects -class Validator - include Singleton - - # map of type to schema filesystem path - SCHEMA_PATHS = { - arch: $root / "schemas" / "arch_schema.json", - inst: $root / "schemas" / "inst_schema.json", - ext: $root / "schemas" / "ext_schema.json", - csr: $root / "schemas" / "csr_schema.json", - cfg_impl_ext: $root / "schemas" / "implemented_exts_schema.json", - manual_version: $root / "schemas" / "manual_version_schema.json", - cert_class: $root / "schemas" / "cert_class_schema.json" - }.freeze - - # types of objects that can be validated - TYPES = SCHEMA_PATHS.keys.freeze - - # Exception raised when there is a problem with a schema file - class SchemaError < ::StandardError - # result from JsonSchemer.validate - attr_reader :result - - def initialize(result) - if result.is_a?(Enumerator) - super(result.to_a.map { |e| "At #{e['schema_pointer']}: #{e['type']}" }) - else - super(result["error"]) - end - @result = result - end - end - - class ValidationError < ::StandardError - def initialize(why) - super(why) - end - end - - # exception raised when an object does not validate against its schema - class SchemaValidationError < ::StandardError - - # result from JsonSchemer.validate - attr_reader :result - - # create a new SchemaValidationError - # - # @param result [JsonSchemer::Result] JsonSchemer result - def initialize(path, result) - msg = "While validating #{path}:\n\n" - nerrors = result.count - msg << "#{nerrors} error(s) during validations\n\n" - result.to_a.each do |r| - msg << - if r["type"] == "required" && !r.dig("details", "missing_keys").nil? 
- " At '#{r['data_pointer']}': Missing required parameter(s) '#{r['details']['missing_keys']}'\n" - elsif r["type"] == "schema" - if r["schema_pointer"] == "/additionalProperties" - " At #{r['data_pointer']}, there is an unallowed additional key\n" - else - " At #{r['data_pointer']}, endpoint is an invalid key\n" - end - elsif r["type"] == "enum" - " At #{r['data_pointer']}, '#{r['data']}' is not a valid enum value (#{r['schema']['enum']})\n" - elsif r["type"] == "maxProperties" - " Maximum number of properties exceeded\n" - elsif r["type"] == "object" - " At #{r['data_pointer']}, Expecting object, got #{r['data']}\n" - elsif r["type"] == "pattern" - " At #{r['data_pointer']}, RegEx validation failed; '#{r['data']}' does not match '#{r['schema']['pattern']}'\n" - elsif r["type"] == "integer" - " At #{r['data_pointer']}, '#{r['data']}' is not a integer\n" - elsif r["type"] == "array" - " At #{r['data_pointer']}, '#{r['data']}' is not a array\n" - elsif r["type"] == "oneOf" - " At #{r['data_pointer']}, '#{r['data']}' matches more than one of #{r['schema']['oneOf']}\n" - elsif r["type"] == "const" - " At #{r['data_pointer']}, '#{r['data']}' does not match required value '#{r['schema']['const']}'\n" - else - " #{r}\n\n" - end - end - msg << "\n" - # msg << result.to_a.to_s - super(msg) - @result = result - end - end - - # initialize a new Validator - # - # @raise [SchemaError] if a schema is ill-formed - def initialize - @schemas = {} - SCHEMA_PATHS.each do |type, path| - # resolve refs as a relative path from the schema file - ref_resolver = proc do |pattern| - if pattern.to_s =~ /^http/ - JSON.parse(Net::HTTP.get(pattern)) - else - JSON.load_file($root / "schemas" / pattern.to_s) - end - end - - @schemas[type] = - JSONSchemer.schema( - path.read, - regexp_resolver: "ecma", - ref_resolver:, - insert_property_defaults: true - ) - raise SchemaError, @schemas[type].validate_schema unless @schemas[type].valid_schema? - end - end - - # validate a YAML string of a given type - # - # @return [Object] The object represented by str - # @param str [String] A YAML document - # @param type [Symbol] Type of the object (One of TYPES) - # @raise [SchemaValidationError] if the str is not valid against the type schema - # @see TYPES - def validate_str(str, type: nil, schema_path: nil, path: nil) - raise "Invalid type #{type}" unless TYPES.any?(type) || !schema_path.nil? - - begin - obj = YAML.safe_load(str, permitted_classes: [Symbol, Date]) - rescue Psych::SyntaxError => e - warn "While parsing: #{str}\n\n" - raise e - end - # convert through JSON to handle anything supported in YAML but not JSON - # (e.g., integer object keys will be coverted to strings) - jsonified_obj = JSON.parse(JSON.generate(obj)) - - raise "Nothing there?" if jsonified_obj.nil? - - schema = - if schema_path.nil? - @schemas[type] - else - # resolve refs as a relative path from the schema file - ref_resolver = proc do |pattern| - JSON.load_file(schema_path.dirname / pattern.to_s) - end - JSONSchemer.schema( - schema_path.read, - regexp_resolver: "ecma", - ref_resolver:, - insert_property_defaults: true - ) - end - - raise SchemaValidationError.new(path, schema.validate(jsonified_obj)) unless schema.valid?(jsonified_obj) - - jsonified_obj - end - - # validate a YAML file - # - # The type of the file is infered from its path unless type is provided - # - # @param path [#to_s] Path to a YAML document - # @param type [Symbol] Type of the object (One of TYPES). 
If nil, type will be inferred from path - # @raise [SchemaValidationError] if the str is not valid against the type schema - # @see TYPES - def validate(path, type: nil) - schema_path = nil - if type.nil? - case path.to_s - when %r{.*cfgs/([^/]+)/params\.yaml} - cfg_name = $1.to_s - type = :cfg_params - schema_path = $root / "gen" / cfg_name / "schemas" / "params_schema.json" - when %r{.*cfgs/[^/]+/implemented_exts\.yaml$} - type = :cfg_impl_ext - when %r{.*arch/arch_def\.yaml$} - type = :arch - when %r{.*arch/inst/.*/.*\.yaml$} - type = :inst - when %r{.*arch/ext/.*\.yaml$} - type = :ext - when %r{.*arch/csr/.*\.yaml$} - type = :csr - when %r{.*arch/manual/.*/.*contents\.yaml$} - type = :manual_version - when %r{.*arch/certificate_class/.*\.yaml$} - type = :cert_class - else - warn "Cannot determine type from YAML path '#{path}'; skipping" - return - end - end - begin - obj = validate_str(File.read(path.to_s), path:, type:, schema_path:) - - # check that the name matches the filename - if [:inst, :csr, :ext, :cert_class].include?(type) && obj["name"] != File.basename(path, ".yaml").to_s - raise ValidationError, "In #{path}, object name '#{obj.keys.first}' does not match filename '#{File.basename(path)}'" - end - obj - rescue Psych::SyntaxError => e - warn "While parsing #{path}" - raise e - end - end - - def ary_from_location(location_str_or_int) - return [location_str_or_int] if location_str_or_int.is_a?(Integer) - - bits = [] - parts = location_str_or_int.split("|") - parts.each do |part| - if part.include?("-") - msb, lsb = part.split("-").map(&:to_i) - (lsb..msb).each { |i| bits << i } - else - bits << part.to_i - end - end - bits - end - - def validate_instruction_encoding(inst_name, encoding) - match = encoding["match"] - raise "No match for instruction #{inst_name}?" if match.nil? - - variables = encoding["variables"] - match.size.times do |i| - if match[match.size - 1 - i] == "-" - # make sure exactly one variable covers this bit - vars_match = variables.count { |variable| ary_from_location(variable["location"]).include?(i) } - if vars_match.zero? - raise ValidationError, "In instruction #{inst_name}, no variable or encoding bit covers bit #{i}" - elsif vars_match != 1 - raise ValidationError, "In instruction, #{inst_name}, bit #{i} is covered by more than one variable" - end - else - # make sure no variable covers this bit - unless variables.nil? - unless variables.none? { |variable| ary_from_location(variable["location"]).include?(i) } - raise ValidationError, "In instruction, #{inst_name}, bit #{i} is covered by both a variable and the match string" - end - end - end - end - end - - # @param path [Pathname] Path to an instruction YAML document - # @raise [ValidateError] if there is a problem with the instruction defintion - def validate_instruction(path) - obj = YamlLoader.load(path) - raise "Invalid instruction definition: #{obj}" unless obj.is_a?(Hash) - - inst_name = path.basename('.yaml').to_s - raise "Invalid instruction definition: #{inst_name} #{obj}" unless obj["name"] == inst_name - - if (obj["encoding"]["RV32"].nil?) 
- validate_instruction_encoding(inst_name, obj["encoding"]) - else - validate_instruction_encoding(inst_name, obj["encoding"]["RV32"]) - validate_instruction_encoding(inst_name, obj["encoding"]["RV64"]) - end - end -end diff --git a/lib/version.rb b/lib/version.rb new file mode 100644 index 000000000..d423aba38 --- /dev/null +++ b/lib/version.rb @@ -0,0 +1,157 @@ +# frozen_string_literal: true + +# represents an RVI version specifier +class VersionSpec + include Comparable + + # MAJOR[.MINOR[.PATCH[-pre]]] + VERSION_REGEX = /([0-9]+)(?:\.([0-9]+)(?:\.([0-9]+)(?:-(pre))?)?)?/ + + # @return [Integer] Major version number + attr_reader :major + + # @return [Integer] Minor version number + attr_reader :minor + + # @return [Integer] Patch version number + attr_reader :patch + + # @return [Boolean] Whether or not this is a pre-release + attr_reader :pre + + def initialize(version_str) + if version_str =~ /^\s*#{VERSION_REGEX}\s*$/ + m = ::Regexp.last_match + @major = m[1].to_i + @minor_given = !m[2].nil? + @minor = @minor_given ? m[2].to_i : 0 + @patch_given = !m[3].nil? + @patch = @patch_given ? m[3].to_i : 0 + @pre = !m[4].nil? + else + raise ArgumentError, "#{version_str} is not a valid Version spec" + end + @version_str = version_str + end + + def inspect + "VersionSpec[str: #{@version_str}; major: #{@major}, minor: #{@minor}, patch: #{@patch}, pre: #{@pre}]" + end + + # @return [String] The version, in canonical form + def canonical + "#{@major}.#{@minor}.#{@patch}#{@pre ? '-pre' : ''}" + end + + # @return [String] The version formatted like RVI docs + # + # @example + # VersionSpec.new("2.2").to_rvi_s #=> "2p2" + def to_rvi_s + s = @major.to_s + s += "p#{@minor}" if @minor_given + s += "p#{@patch}" if @patch_given + s += "-pre" if @pre + s + end + + # @return [String] The exact string used during construction + def to_s = @version_str + + def <=>(other) + if other.is_a?(String) + VersionSpec.new(other) <=> self + elsif other.is_a?(VersionSpec) + if @major != other.major + @major <=> other.major + elsif @minor != other.minor + @minor <=> other.minor + elsif @patch != other.patch + @patch <=> other.patch + elsif @pre != other.pre + @pre ? 
1 : -1
+      else
+        0
+      end
+    else
+      raise ArgumentError, "Cannot compare VersionSpec with #{other.class.name}"
+    end
+  end
+
+  # @param other [VersionSpec] Comparison
+  # @return [Boolean] Whether or not +other+ is a VersionSpec with the same canonical version
+  def eql?(other)
+    if other.is_a?(String)
+      eql?(VersionSpec.new(other))
+    elsif other.is_a?(VersionSpec)
+      other.major == @major && \
+        other.minor == @minor && \
+        other.patch == @patch && \
+        other.pre == @pre
+    else
+      raise ArgumentError, "Cannot compare VersionSpec with #{other.class.name}"
+    end
+  end
+end
+
+# A version requirement (an operator plus a version specifier), e.g., ">= 1.12"
+class RequirementSpec
+  REQUIREMENT_OP_REGEX = /((?:>=)|(?:>)|(?:~>)|(?:<)|(?:<=)|(?:!=)|(?:=))/
+  REQUIREMENT_REGEX = /#{REQUIREMENT_OP_REGEX}\s*(#{VersionSpec::VERSION_REGEX})/
+
+  # @param requirement [String] A requirement string
+  def initialize(requirement)
+    unless requirement.is_a?(String)
+      raise ArgumentError, "requirement must be a string (is a #{requirement.class.name})"
+    end
+
+    if requirement =~ /^\s*#{REQUIREMENT_REGEX}\s*$/
+      m = ::Regexp.last_match
+      @op = m[1]
+      @version_str = m[2]
+      @version_spec = VersionSpec.new(@version_str)
+    else
+      raise ArgumentError, "Bad requirement string '#{requirement}'"
+    end
+  end
+
+  def to_s
+    "#{@op} #{@version_str}"
+  end
+
+  # @param version [String] A version string
+  # @param version [VersionSpec] A version spec
+  # @param ext [Extension] An extension, needed to evaluate the compatible (~>) operator
+  # @return [Boolean] whether the version satisfies the requirement
+  def satisfied_by?(version, ext)
+    v_spec =
+      case version
+      when String
+        VersionSpec.new(version)
+      when VersionSpec
+        version
+      else
+        raise ArgumentError, "satisfied_by? expects a String or VersionSpec (got #{version.class.name})"
+      end
+
+    case @op
+    when ">="
+      v_spec >= @version_spec
+    when ">"
+      v_spec > @version_spec
+    when "<="
+      v_spec <= @version_spec
+    when "<"
+      v_spec < @version_spec
+    when "="
+      v_spec == @version_spec
+    when "!="
+      v_spec != @version_spec
+    when "~>"
+      matching_ver = ext.versions.find { |v| v.version_spec == v_spec }
+      raise "Can't find version?" if matching_ver.nil?
+
+      matching_ver.compatible?(ExtensionVersion.new(ext.name, v_spec.to_s, ext.arch_def))
+    end
+  end
+end
diff --git a/lib/yaml_loader.rb b/lib/yaml_loader.rb
deleted file mode 100644
index 06f994865..000000000
--- a/lib/yaml_loader.rb
+++ /dev/null
@@ -1,215 +0,0 @@
-# frozen_string_literal: true
-
-require "pathname"
-require "yaml"
-
-# loads a YAML file and expands any $ref/$inherits references
-class YamlLoader
-  @cache = {}
-
-  class DereferenceError < StandardError; end
-
-  def self.expand(filename, obj, yaml_opts = {})
-    return obj unless obj.is_a?(Hash) || obj.is_a?(Array)
-
-    return obj.map { |v| expand(filename, v, yaml_opts) } if obj.is_a?(Array)
-
-    new_obj =
-      if obj.keys.include?("$ref")
-        # according JSON Reference, all keys except $ref are ignored
-        relative_path = obj["$ref"].split("#")[0]
-        if relative_path.empty?
-          # this is a reference in the same document
-          obj_doc = YAML.load_file(filename, **yaml_opts)
-          obj_path = obj["$ref"].split("#")[1].split("/")[1..]
-          target_obj = obj_doc.dig(*obj_path)
-          raise DereferenceError, "#{obj['$ref']} cannot be found" if target_obj.nil?
-
-          ref = expand(filename, target_obj, yaml_opts)
-          if ref.nil?
- raise DereferenceError, "JSON Path #{obj['$ref'].split('#')[1]} does not exist in #{filename}" - end - - { "$ref" => obj["$ref"] } # ignore any other keys that might exist - else - target_filename = - if File.exist?(File.join(filename.dirname, relative_path)) - File.realpath(File.join(filename.dirname, relative_path)) - elsif File.exist?(File.join($root, 'arch', relative_path)) - File.join($root, 'arch', relative_path) - else - raise DereferenceError, "#{relative_path} cannot be found" - end - - obj_doc = YamlLoader.load(target_filename, yaml_opts) - file_path, obj_path = obj["$ref"].split("#") - target_obj = - if obj_path.nil? - obj_doc - else - obj_doc.dig(*(obj_path.split("/")[1..])) - - end - raise "#{obj['$ref']} cannot be found" if target_obj.nil? - - ref = expand(target_filename, target_obj, yaml_opts) - if ref.nil? - raise DereferenceError, "JSON Path #{obj['$ref'].split('#')[1]} does not exist in #{target_filename}" - end - - { "$ref" => obj["$ref"] } # ignore any other keys that might exist - end - elsif obj.keys.include?("$inherits") - # we handle the inherits key first so that any override will take priority - inherits = obj["$inherits"] - raise ArgumentError, "Missing reference after $inherits (did you forget to put a relative reference in quotes?)" if inherits.nil? - inherits_targets = inherits.is_a?(String) ? [inherits] : inherits - - new_obj = {} - - inherits_targets.each do |inherits_target| - relative_path = inherits_target.split("#")[0] - target_obj = - if relative_path.empty? - YAML.load_file(filename, **yaml_opts) - else - target_filename = - if File.exist?(File.join(filename.dirname, relative_path)) - File.realpath(File.join(filename.dirname, relative_path)) - elsif File.exist?(File.join($root, 'arch', relative_path)) - File.join($root, 'arch', relative_path) - else - raise DereferenceError, "#{relative_path} cannot be found" - end - - unless File.exist?(target_filename) - raise DereferenceError, "While locating $inherits in #{filename}, #{target_filename} does not exist" - end - - YamlLoader.load(target_filename, yaml_opts) - end - - inherits_target_suffix = inherits_target.split("#/")[1] - inherits_target_path = inherits_target_suffix.split("/") - begin - target_obj = target_obj.dig(*inherits_target_path) - rescue TypeError => e - if e.message == "no implicit conversion of String into Integer" - warn "$inherits: \"#{inherits_target}\" found in file #{filename} references an Array but needs to reference a Hash" - end - raise e - end - - raise DereferenceError, "JSON Path #{inherits_target_suffix} in file #{filename} does not exist in #{relative_path}" if target_obj.nil? 
- raise ArgumentError, "$inherits: \"#{inherits_target}\" in file #{filename} references a #{target_obj.class} but needs to reference a Hash" unless target_obj.is_a?(Hash) - - target_obj = expand(filename, target_obj, yaml_opts) - target_obj.each do |target_key, target_value| - if (new_obj[target_key].is_a?(Hash)) - raise "Should be a hash" unless target_value.is_a?(Hash) - new_obj[target_key] = target_value.merge(new_obj[target_key]) - else - new_obj[target_key] = target_value - end - end - end - - obj.delete("$inherits") - # now merge target_obj and obj - keys = (obj.keys + new_obj.keys).uniq - final_obj = {} - keys.each do |key| - if !obj.key?(key) - final_obj[key] = new_obj[key] - elsif !new_obj.key?(key) - final_obj[key] = expand(filename, obj[key], yaml_opts) - else - value = obj[key] - - if new_obj[key].is_a?(Hash) - raise "should be a hash" unless new_obj[key].is_a?(Hash) - final_obj[key] = new_obj[key].merge(obj[key]) - else - final_obj[key] = expand(filename, obj[key], yaml_opts) - end - end - end - - final_obj - elsif obj.keys.include?("$copy") - self.get_copy_target_obj(filename, obj["$copy"], yaml_opts) - else - # Go through each hash entry. - obj.each do |key, value| - obj[key] = expand(filename, value, yaml_opts) - end - obj - end - - if new_obj.is_a?(Hash) - obj_keys = new_obj.keys - if obj_keys.include? "$remove" - remove_keys = obj["$remove"].is_a?(Array) ? obj["$remove"] : [obj["$remove"]] - remove_keys.each do |key| - new_obj.delete(key) - end - end - new_obj.delete("$remove") - end - - new_obj - end - - # @param filename [String,Pathname] path to the YAML file - # @param copy_target [String] - # @param yaml_opts [Hash] options to pass to YAML.load_file - # @return [Object] - def self.get_copy_target_obj(filename, copy_target, yaml_opts) - relative_path = copy_target.split("#")[0] - if relative_path.empty? - # this is a reference in the same document - obj_doc = YAML.load_file(filename, **yaml_opts) - obj_path = copy_target.split("#")[1].split("/")[1..] - target_obj = obj_doc.dig(*obj_path) - raise DereferenceError, "$copy: #{obj_path} referenced to same file cannot be found in file #{filename}" if target_obj.nil? - - ref = expand(filename, target_obj, yaml_opts) - if ref.nil? - raise DereferenceError, "$copy: JSON Path #{obj_path} referenced to same file does not exist in file #{filename}" - end - - ref - else - target_filename = File.realpath(File.join(filename.dirname, relative_path)) - - obj_doc = YamlLoader.load(target_filename, yaml_opts) - obj_path = copy_target.split("#")[1].split("/")[1..] - target_obj = obj_doc.dig(*obj_path) - raise DereferenceError, "$copy: #{obj_path} referenced from file #{filename} cannot be found in file #{target_filename}" if target_obj.nil? - - ref = expand(target_filename, target_obj, yaml_opts) - if ref.nil? - raise DereferenceError, "$copy: JSON Path #{obj_path} referenced from file #{filename} does not exist in file #{target_filename}" - end - - ref - end - end - - # load a YAML file and expand any $ref/$inherits references - # @param filename [String,Pathname] path to the YAML file - # @param yaml_opts [Hash] options to pass to YAML.load_file - # @return [Object] the loaded YAML file - def self.load(filename, yaml_opts = {}) - filename = Pathname.new(filename) - raise ArgumentError, "Cannot find file #{filename}" unless filename.exist? 
- - filename = filename.realpath - return @cache[filename] if @cache.key?(filename) - - obj = YAML.load_file(filename, **yaml_opts) - obj = expand(filename, obj, yaml_opts) if obj.is_a?(Hash) - - # @cache[filename] = obj - end -end diff --git a/lib/yaml_resolver.py b/lib/yaml_resolver.py index 47e6c87f3..3cadfe635 100644 --- a/lib/yaml_resolver.py +++ b/lib/yaml_resolver.py @@ -1,28 +1,179 @@ import glob, os +import argparse +import shutil +import json + +from pathlib import Path from copy import deepcopy from tqdm import tqdm from ruamel.yaml import YAML from mergedeep import merge, Strategy +from jsonschema import Draft7Validator, validators +from jsonschema.exceptions import best_match +from jsonschema.exceptions import ValidationError + +from referencing import Registry, Resource +from referencing.exceptions import NoSuchResource + +# cahce of Schema valiators +schemas = {} + +SCHEMAS_PATH = Path(os.path.join(os.path.dirname(os.path.dirname(__file__)), "schemas")) + +def retrieve_from_filesystem(uri: str): + path = SCHEMAS_PATH / Path(uri) + contents = json.loads(path.read_text()) + return Resource.from_contents(contents) + +registry = Registry(retrieve=retrieve_from_filesystem) + +# extend the validator to support default values +# https://python-jsonschema.readthedocs.io/en/stable/faq/#why-doesn-t-my-schema-s-default-property-set-the-default-on-my-instance +def extend_with_default(validator_class): + """Extends the jsonschema validator to support default values. + + Parameters + ---------- + validator_class : jsonschema.Draft7Validator + The validator class to extend. + + Returns + ------- + jsonschema.Draft7Validator + The extended validator class that will fill in default values + """ + + validate_properties = validator_class.VALIDATORS["properties"] + + + def set_defaults(validator, properties, instance, schema): + for property, subschema in properties.items(): + if not isinstance(subschema, dict): + continue + if "default" in subschema: + instance.setdefault(property, subschema["default"]) + + for error in validate_properties( + validator, properties, instance, schema, + ): + yield error + + return validators.extend( + validator_class, {"properties" : set_defaults}, + ) + + +DefaultValidatingValidator = extend_with_default(Draft7Validator) -OUT_DIR="arch_resolved" UDB_ROOT=os.path.dirname(os.path.dirname(os.path.realpath(__file__))) yaml = YAML(typ="rt") yaml.default_flow_style = False yaml.preserve_quotes = True +def _merge_patch(base: dict, patch: dict, path_so_far = []) -> None: + """merges patch into base according to JSON Merge Patch (RFC 7386) + + Parameters + ---------- + base : dict + The base object, which will be altered by the patch + patch : dict + The patch object + path_so_far : list + The current dict key path within patch + """ -def read_yaml(file_path): + patch_obj = patch if len(path_so_far) == 0 else dig(patch, *path_so_far) + for key, patch_value in patch_obj.items(): + if isinstance(patch_value, dict): + # continue to dig + _merge_patch(base, patch, (path_so_far + [key])) + else: + base_ptr = dig(base, *path_so_far) + base_value = dig(base_ptr, key) + if patch_value == None: + # remove from base, if it exists + if base_value != None: + base_ptr.pop(key) + else: + if base_ptr == None: + # add or overwrite value in base + base_ptr = base + for k in path_so_far: + if not k in base_ptr: + base_ptr[k] = {} + base_ptr = base_ptr[k] + base_ptr = dig(base, *path_so_far) + base_ptr[key] = patch_value + +def json_merge_patch(base_obj: dict, patch: dict) -> dict: + 
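+    # Worked example of the RFC 7386 behavior implemented below (a None value in
+    # the patch removes the key; nested objects are merged recursively):
+    #   base  = {"a": 1, "b": {"c": 2}}
+    #   patch = {"b": {"c": None, "d": 3}}
+    #   json_merge_patch(base, patch)  ->  {"a": 1, "b": {"d": 3}}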
"""merges patch into base according to JSON Merge Patch (RFC 7386) + + Parameters + ---------- + base : dict + The base object, which will be altered by the patch + patch : dict + The patch object + + Returns + ------- + dict + base_obj, now with the patch applied + """ + _merge_patch(base_obj, patch, []) + return base_obj + +def read_yaml(file_path : str | Path): + """Read a YAML file from file_path and return the parsed content + + Parameters + ---------- + file_path : str, Path + Filesystem path to the YAML file + + Returns + ------- + dict, list + The object represented in the YAML file + """ with open(file_path, 'r') as file: data = yaml.load(file) return data -def write_yaml(file_path, data): +def write_yaml(file_path : str | Path, data): + """Write data as YAML to file_path + + Parameters + ---------- + file_path : str, Path + Filesystem path to the YAML file + data : dict, list + The object to write as YAML + """ with open(file_path, 'w') as file: yaml.dump(data, file) -def dig(obj, *keys): +def dig(obj : dict, *keys): + """Digs data out of dictionary obj + + Parameters + ---------- + obj : dict + A dictionary + *keys + A list of obj keys + + Returns + ------- + Any + The value of obj[keys[0]][keys[1]]...[keys[-1]] + """ + if obj == None: + return None + if len(keys) == 0: return obj @@ -31,18 +182,34 @@ def dig(obj, *keys): if len(keys) == 1: return next_obj else: + if not isinstance(next_obj, dict): + raise ValueError(f"Not a hash: {keys}") return dig(next_obj, *keys[1:]) except KeyError: return None resolved_objs = {} -def resolve(path, rel_path, arch_root): - if path in resolved_objs: - return resolved_objs[path] +def resolve(rel_path : str | Path, arch_root : str | Path) -> dict: + """Resolve the file at arch_root/rel_path by expanding operators and applying defaults + + Parameters + ---------- + rel_path : str, Path + The relative path to the file to resolve + arch_root : str, Path + The root of the architecture + + Returns + ------- + dict + The resolved object + """ + if str(rel_path) in resolved_objs: + return resolved_objs[str(rel_path)] else: - unresolved_data = read_yaml(path) - resolved_objs[path] = _resolve(unresolved_data, [], rel_path, unresolved_data, arch_root) - return resolved_objs[path] + unresolved_arch_data = read_yaml(os.path.join(arch_root, rel_path)) + resolved_objs[str(rel_path)] = _resolve(unresolved_arch_data, [], rel_path, unresolved_arch_data, arch_root) + return resolved_objs[str(rel_path)] def _resolve(obj, obj_path, obj_file_path, doc_obj, arch_root): if not (isinstance(obj, list) or isinstance(obj, dict)): @@ -57,7 +224,7 @@ def _resolve(obj, obj_path, obj_file_path, doc_obj, arch_root): inherits_targets = [obj["$inherits"]] if isinstance(obj["$inherits"], str) else obj["$inherits"] obj["$child_of"] = obj["$inherits"] - new_obj = yaml.load("{}") + parent_obj = yaml.load("{}") for inherits_target in inherits_targets: ref_file_path = inherits_target.split("#")[0] @@ -75,57 +242,215 @@ def _resolve(obj, obj_path, obj_file_path, doc_obj, arch_root): # this is a reference to another doc if not os.path.exists(os.path.join(UDB_ROOT, arch_root, ref_file_path)): raise ValueError(f"{ref_file_path} does not exist in {arch_root}/") - ref_file_full_path = os.path.join(UDB_ROOT, arch_root, ref_file_path) - ref_doc_obj = resolve(ref_file_full_path, ref_file_path, arch_root) + ref_doc_obj = resolve(ref_file_path, arch_root) ref_obj = dig(ref_doc_obj, *ref_obj_path) ref_obj = _resolve(ref_obj, ref_obj_path, ref_file_path, ref_doc_obj, arch_root) for key in 
ref_obj:
-            if isinstance(new_obj.get(key), dict):
-                merge(new_obj[key], ref_obj, strategy=Strategy.REPLACE)
+            if isinstance(parent_obj.get(key), dict):
+                merge(parent_obj[key], ref_obj[key], strategy=Strategy.REPLACE)
             else:
-                new_obj[key] = deepcopy(ref_obj[key])
+                parent_obj[key] = deepcopy(ref_obj[key])
 
-        print(f"{obj_file_path} {obj_path} inherits {ref_file_path} {ref_obj_path}")
-        ref_obj["$parent_of"] = f"{obj_file_path}#/{"/".join(obj_path)}"
+        if "$parent_of" in ref_obj:
+            if isinstance(ref_obj["$parent_of"], list):
+                ref_obj["$parent_of"].append(f"{obj_file_path}#/{"/".join(obj_path)}")
+            else:
+                ref_obj["$parent_of"] = [ref_obj["$parent_of"], f"{obj_file_path}#/{"/".join(obj_path)}"]
+        else:
+            ref_obj["$parent_of"] = f"{obj_file_path}#/{"/".join(obj_path)}"
 
         del obj["$inherits"]
-    # now new_obj is the child and obj is the parent
+    # now parent_obj is the child and obj is the parent
     # merge them
     keys = []
     for key in obj.keys():
        keys.append(key)
-    for key in new_obj.keys():
+    for key in parent_obj.keys():
        if keys.count(key) == 0:
            keys.append(key)
 
     final_obj = yaml.load('{}')
     for key in keys:
        if not key in obj:
-            final_obj[key] = new_obj[key]
-        elif not key in new_obj:
+            final_obj[key] = parent_obj[key]
+        elif not key in parent_obj:
            final_obj[key] = _resolve(obj[key], obj_path + [key], obj_file_path, doc_obj, arch_root)
        else:
-            if isinstance(new_obj[key], dict):
-                if not isinstance(new_obj[key], dict):
-                    raise ValueError("should be a hash")
-                final_obj[key] = merge(yaml.load('{}'), new_obj[key], obj[key], strategy=Strategy.REPLACE)
+            if isinstance(parent_obj[key], dict):
+                final_obj[key] = merge(yaml.load('{}'), parent_obj[key], obj[key], strategy=Strategy.REPLACE)
            else:
                final_obj[key] = _resolve(obj[key], obj_path + [key], obj_file_path, doc_obj, arch_root)
 
+    if "$remove" in final_obj:
+        if final_obj["$remove"] in final_obj:
+            del final_obj[final_obj["$remove"]]
+        del final_obj["$remove"]
     return final_obj
   else:
     for key in obj:
        obj[key] = _resolve(obj[key], obj_path + [key], obj_file_path, doc_obj, arch_root)
+    if "$remove" in obj:
+        if obj["$remove"] in obj:
+            del obj[obj["$remove"]]
+        del obj["$remove"]
+    return obj
+
+def merge_file(rel_path : str | Path, arch_dir : str | Path, overlay_dir : str | Path | None, merge_dir : str | Path) -> None:
+    """ pick the right file(s) to merge, and write the result to merge_dir
+
+    Parameters
+    ----------
+    rel_path : str, Path
+        Relative path, from arch_dir, to base file
+    arch_dir : str, Path
+        Absolute path to arch dir with base files
+    overlay_dir : str, Path, None
+        Absolute path to overlay dir with overlay files
+    merge_dir : str, Path
+        Absolute path to merge dir, where the merged file will be written
+    """
+    arch_path = overlay_path = None
+
+    if arch_dir != None:
+        arch_path = os.path.join(arch_dir, rel_path)
+    if overlay_dir != None:
+        overlay_path = os.path.join(overlay_dir, rel_path)
+    merge_path = os.path.join(merge_dir, rel_path)
+    if not os.path.exists(arch_path) and (overlay_path == None or not os.path.exists(overlay_path)):
+        # neither exist
+        if not os.path.exists(merge_path):
+            raise RuntimeError("Script error: no path exists")
+
+        # remove the merged file
+        os.remove(merge_path)
+    elif overlay_path == None or not os.path.exists(overlay_path):
+        if arch_path == None:
+            raise ValueError("Must supply either arch_path or overlay_path")
+
+        # no overlay, just copy arch
+        if not os.path.exists(merge_path) or (os.path.getmtime(arch_path) > os.path.getmtime(merge_path)):
+            shutil.copyfile(os.path.join(arch_dir, rel_path), merge_path)
+    elif not os.path.exists(arch_path):
+        if overlay_path == None or not os.path.exists(overlay_path):
+            raise ValueError("Must supply either arch_path or overlay_path")
+
+        # no arch, just copy overlay
+        if not os.path.exists(merge_path) or (os.path.getmtime(overlay_path) > os.path.getmtime(merge_path)):
+            shutil.copyfile(os.path.join(overlay_dir, rel_path), merge_path)
+    else:
+        # both exist, merge
+        if not os.path.exists(merge_path) or (os.path.getmtime(overlay_path) > os.path.getmtime(merge_path)) or (os.path.getmtime(arch_path) > os.path.getmtime(merge_path)):
+            arch_obj = read_yaml(os.path.join(arch_dir, rel_path))
+            overlay_obj = read_yaml(os.path.join(overlay_dir, rel_path))
+
+            write_yaml(os.path.join(merge_dir, rel_path), json_merge_patch(arch_obj, overlay_obj))
+
+class SchemaNotFoundException(Exception):
+    pass
+
+def _get_schema(uri):
+    rel_path = uri.split("#")[0]
+
+    if rel_path in schemas:
+        return schemas[rel_path]
+
+    abs_path = os.path.join(SCHEMAS_PATH, rel_path)
+    if not os.path.exists(abs_path):
+        raise SchemaNotFoundException(f"Schema not found: {uri}")
+
+    # Open the JSON file
+    with open(abs_path, 'r') as f:
+        # Load the JSON data into a Python dictionary
+        schema_obj = json.load(f)
+
+    schemas[rel_path] = DefaultValidatingValidator(schema_obj, registry=registry)
+    return schemas[rel_path]
+
+
+def resolve_file(rel_path : str | Path, arch_dir: str | Path, resolved_dir: str | Path):
+    """Read object at arch_dir/rel_path, resolve it, and write it as YAML to resolved_dir/rel_path
+
+    Parameters
+    ----------
+    rel_path : str | Path
+        Path to file relative to arch_dir
+    arch_dir : str | Path
+        Absolute path to arch directory
+    resolved_dir : str | Path
+        Directory to write the resolved file to
+    """
+    arch_path = os.path.join(arch_dir, rel_path)
+    resolved_path = os.path.join(resolved_dir, rel_path)
+    if not os.path.exists(arch_path):
+        if os.path.exists(resolved_path):
+            os.remove(resolved_path)
+    elif not os.path.exists(resolved_path) or (os.path.getmtime(arch_path) > os.path.getmtime(resolved_path)) or (os.path.getmtime(__file__) > os.path.getmtime(resolved_path)):
+        if os.path.exists(resolved_path):
+            os.remove(resolved_path)
+        resolved_obj = resolve(rel_path, arch_dir)
+        resolved_obj["$source"] = os.path.join(arch_dir, rel_path)
+
+        if "$schema" in resolved_obj:
+            schema = _get_schema(resolved_obj["$schema"])
+            try:
+                schema.validate(instance=resolved_obj)
+            except ValidationError as e:
+                print(f"JSON Schema Validation Error for {rel_path}:")
+                print(best_match(schema.iter_errors(resolved_obj)).message)
+                exit(1)
+
+        write_yaml(resolved_path, resolved_obj)
+        os.chmod(resolved_path, 0o444)
+
+if __name__ == '__main__':
+    cmdparser = argparse.ArgumentParser(
+        prog="yaml_resolver.py",
+        description="Resolves/overlays UDB architecture YAML files")
+    subparsers = cmdparser.add_subparsers(dest='command', help='sub-command help')
+    merge_parser = subparsers.add_parser('merge', help='Merge overlay on top of architecture files')
+    merge_parser.add_argument("arch_dir", type=str, help="Unresolved architecture (input) directory")
+    merge_parser.add_argument("overlay_dir", type=str, help="Overlay directory")
+    merge_parser.add_argument("merged_dir", type=str, help="Merged architecture (output) directory")
+
+    all_parser = subparsers.add_parser('resolve', help='Resolve all architecture files')
+    all_parser.add_argument("arch_dir", type=str, help="Unresolved architecture (input) directory")
+    all_parser.add_argument("resolved_dir", type=str, help="Resolved architecture (output) directory")
+
+    
args = cmdparser.parse_args() + + if args.command == 'merge': + arch_paths = glob.glob(f"**/*.yaml", recursive=True, root_dir=args.arch_dir) + if args.overlay_dir != None: + overlay_paths = glob.glob(f"**/*.yaml", recursive=True, root_dir=args.overlay_dir) + arch_paths.extend(overlay_paths) + arch_paths = list(set(arch_paths)) + merged_paths = glob.glob(f"**/*.yaml", recursive=True, root_dir=args.merged_dir) + arch_paths.extend(merged_paths) + arch_paths = list(set(arch_paths)) + + for arch_path in tqdm(arch_paths, ascii=True, desc="Merging arch"): + merged_arch_path = f"{UDB_ROOT}/{args.merged_dir}/{arch_path}" + os.makedirs(os.path.dirname(merged_arch_path), exist_ok=True) + merge_file(arch_path, args.arch_dir, args.overlay_dir, args.merged_dir) + + print(f"[INFO] Merged architecture files written to {args.merged_dir}") + + elif args.command == 'resolve': + arch_paths = glob.glob(f"**/*.yaml", recursive=True, root_dir=args.arch_dir) + if os.path.exists(args.resolved_dir): + resolved_paths = glob.glob(f"**/*.yaml", recursive=True, root_dir=args.resolved_dir) + arch_paths.extend(resolved_paths) + arch_paths = list(set(arch_paths)) + for arch_path in tqdm(arch_paths, ascii=True, desc="Resolving arch"): + resolved_arch_path = f"{UDB_ROOT}/{args.resolved_dir}/{arch_path}" + os.makedirs(os.path.dirname(resolved_arch_path), exist_ok=True) + resolve_file(arch_path, args.arch_dir, args.resolved_dir) -arch_paths = glob.glob("arch/**/*.yaml", recursive=True, root_dir=UDB_ROOT) -for arch_path in tqdm(arch_paths): - resolved_arch_path = f"{UDB_ROOT}/{OUT_DIR}/{arch_path}" - os.makedirs(os.path.dirname(resolved_arch_path), exist_ok=True) - write_yaml(resolved_arch_path, resolve(arch_path, os.path.join(*arch_path.split("/")[1:]), "arch")) + print(f"[INFO] Resolved architecture files written to {args.resolved_dir}") diff --git a/requirements.txt b/requirements.txt index 2d2633076..0878c6540 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,3 @@ pre_commit==4.0.1 PyYAML==6.0.2 +jsonschema diff --git a/schemas/arch_schema.json b/schemas/arch_schema.json deleted file mode 100644 index 385784650..000000000 --- a/schemas/arch_schema.json +++ /dev/null @@ -1,134 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - - "type": "object", - "title": "Unified Architecture Specification", - "required": [ - "type", - "extensions", - "csrs", - "instructions" - ], - "allOf": [ - { - "if": { - "properties": { - "type": { "enum": ["fully configured"]} - } - }, - "then": { - "required": [ - "implemented_instructions", - "implemented_extensions", - "implemented_csrs", - "params" - ] - } - } - ], - "properties": { - "type": { - "type": "string", - "description": "Type of the arch", - "enum": ["unconfigured", "partially configured", "fully configured"] - }, - "params": { - "type": "object" - }, - "implemented_extensions": { - "description": "Extensions implemented by this architecture", - "type": "array", - "items": { - "type": "object", - "required": ["name", "version"], - "properties": { - "name": { - "type": "string", - "pattern": "^([A-WY]|([SXZ][a-z0-9]+))$", - "description": "Extension name" - }, - "version": { - "oneOf": [ - { - "type": "string", - "description": "Extension version" - }, - { - "type": "number" - } - ] - } - }, - "additionalProperties": false - } - }, - "mandatory_extensions": { - "description": "Extensions mandatory in this architecture", - "type": "array", - "items": { - "$ref": "schema_defs.json#/$defs/extension_requirement" - } - }, - "extensions": { - "type": "array", 
- "items": { - "$ref": "ext_schema.json#/$defs/ext_data" - } - }, - "csrs": { - "type": "array", - "items": { - "$ref": "csr_schema.json#/$defs/csr_register" - } - }, - "implemented_csrs": { - "description": "CSRs implemented by this architecture", - "type": "array", - "items": { - "type": "string", - "pattern": "^[a-z][a-zA-Z0-9_]+$", - "description": "CSR name" - } - }, - "instructions": { - "type": "array", - "items": { - "$ref": "inst_schema.json#" - } - }, - "implemented_instructions": { - "description": "Instructions implemented by this architecture", - "type": "array", - "items": { - "type": "string", - "pattern": "^[a-z][a-zA-Z0-9.]+$", - "description": "Instruction name" - } - }, - "profile_classes": { - "type": "object" - }, - "profile_releases": { - "type": "object", - "items": { - "type": "string" - } - }, - "manuals": { - "type": "object" - }, - "certificate_classes": { - "type": "array", - "items": { - "$ref": "cert_class_schema.json#" - } - }, - "certificate_models": { - "type": "array", - "items": { - "$ref": "cert_model_schema.json#" - } - } - }, - "additionalProperties": false -} diff --git a/schemas/cert_class_schema.json b/schemas/cert_class_schema.json index d7f7ff80c..bc6769c4d 100644 --- a/schemas/cert_class_schema.json +++ b/schemas/cert_class_schema.json @@ -40,8 +40,8 @@ "minItems": 1, "description": "List of mandatory privilege modes for the class" }, - "__source": { - "$ref": "schema_defs.json#/$defs/__source" + "$source": { + "$ref": "schema_defs.json#/$defs/$source" } } } \ No newline at end of file diff --git a/schemas/cert_model_schema.json b/schemas/cert_model_schema.json index b20551445..79182eb26 100644 --- a/schemas/cert_model_schema.json +++ b/schemas/cert_model_schema.json @@ -100,18 +100,26 @@ "oneOf": [ { "type": "string", - "pattern": "^profile_release/.*\\.yaml#.*" + "pattern": "^profile/.*\\.yaml#.*" }, { "type": "array", "items": { "type": "string", - "pattern": "^profile_release/.*\\.yaml#.*" + "pattern": "^profile/.*\\.yaml#.*" }, "uniqueItems": true } ] }, + "^\\$child_of": { + "type": "array", + "items": { + "type": "string", + "pattern": "^profile/.*\\.yaml#.*" + }, + "uniqueItems": true + }, "^([A-WY])|([SXZ][a-z0-9]+)$": { "type": "object", "properties": { @@ -192,8 +200,8 @@ } } }, - "__source": { - "$ref": "schema_defs.json#/$defs/__source" + "$source": { + "$ref": "schema_defs.json#/$defs/$source" } } } \ No newline at end of file diff --git a/schemas/config_schema.json b/schemas/config_schema.json index 493e5b157..1397a72eb 100644 --- a/schemas/config_schema.json +++ b/schemas/config_schema.json @@ -1,546 +1,178 @@ { "$schema": "http://json-schema.org/draft-07/schema#", - "$defs": { - "params": { - "type": "object", - "required": [ - "NAME", - "XLEN", - "M_MODE_ENDIANESS", - "NUM_PMP_ENTRIES", - "ARCH_ID", - "VENDOR_ID_BANK", - "VENDOR_ID_OFFSET", - "IMP_ID", - "MISALIGNED_LDST", - "NUM_HPM_COUNTERS", - "REPORT_VA_IN_MTVAL_ON_BREAKPOINT", - "REPORT_VA_IN_MTVAL_ON_LOAD_MISALIGNED", - "REPORT_VA_IN_MTVAL_ON_STORE_AMO_MISALIGNED", - "REPORT_VA_IN_MTVAL_ON_INSTRUCTION_MISALIGNED", - "REPORT_VA_IN_MTVAL_ON_LOAD_ACCESS_FAULT", - "REPORT_VA_IN_MTVAL_ON_STORE_AMO_ACCESS_FAULT", - "REPORT_VA_IN_MTVAL_ON_INSTRUCTION_ACCESS_FAULT", - "REPORT_VA_IN_MTVAL_ON_LOAD_PAGE_FAULT", - "REPORT_VA_IN_MTVAL_ON_STORE_AMO_PAGE_FAULT", - "REPORT_VA_IN_MTVAL_ON_INSTRUCTION_PAGE_FAULT", - "REPORT_ENCODING_IN_MTVAL_ON_ILLEGAL_INSTRUCTION", - "TRAP_ON_ILLEGAL_WLRL", - "CONFIG_PTR_ADDRESS", - "PHYS_ADDR_WIDTH", - "CACHE_BLOCK_SIZE", - "MISALIGNED_SPLIT_STRATEGY" - 
], - "properties": { - "XLEN": { - "type": "integer", - "enum": [ - 32, - 64 - ], - "description": "Base instruction set datapath width (e.g., RV32 or RV64)\n" - }, - "NAME": { - "type": "string", - "description": "Name of the configuration.\n*Must* match the directory name under cfgs/ where params.yaml is stored.\n" - }, - "MISALIGNED_SPLIT_STRATEGY": { - "type": "string", - "enum": ["by_byte"], - "description": "How the implementation splits misaligned loads/stores." - }, - "M_MODE_ENDIANESS": { - "type": "integer", - "enum": [ 0, 1, 2], - "description": "Endianess of data in M-mode. Can be one of:\n\n * 0: M-mode data is always little endian\n * 1: M-mode data is always big endian\n * 2: M-mode data can be either little or big endian, depending on the RW CSR field mstatus.MBE\n" - }, - "S_MODE_ENDIANESS": { - "type": "integer", - "enum": [0, 1, 2], - "description": "Endianess of data in S-mode. Can be one of:\n\n * 0: S-mode data is always little endian\n * 1: S-mode data is always big endian\n * 2: S-mode data can be either little or big endian, depending on the RW CSR field mstatus.SBE\n" - }, - "U_MODE_ENDIANESS": { - "type": "integer", - "enum": [0, 1, 2], - "description": "Endianess of data in U-mode. Can be one of:\n\n * 0: U-mode data is always little endian\n * 1: U-mode data is always big endian\n * 2: U-mode data can be either little or big endian, depending on the RW CSR field mstatus.UBE\n" - }, - "VS_MODE_ENDIANESS": { - "type": "integer", - "enum": [0, 1, 2], - "description": "Endianess of data in VS-mode. Can be one of:\n\n * 0: VS-mode data is always little endian\n * 1: VS-mode data is always big endian\n * 2: VS-mode data can be either little or big endian, depending on the RW CSR field hstatus.VSBE\n" - }, - "VU_MODE_ENDIANESS": { - "type": "integer", - "enum": [0, 1, 2], - "description": "Endianess of data in VU-mode. Can be one of:\n\n * 0: VU-mode data is always little endian\n * 1: VU-mode data is always big endian\n * 2: VU-mode data can be either little or big endian, depending on the RW CSR field vsstatus.UBE\n" - }, - "SXLEN": { - "description": "XLENs supported in S-mode. Can be one of:\n\n * 32: SXLEN is always 32\n * 64: SXLEN is always 64 * 3264: SXLEN can be changed (via mstatus.SXL) between 32 and 64", - "type": "integer", - "enum": [32, 64, 3264] - }, - "UXLEN": { - "description": "XLENs supported in U-mode. Can be one of:\n\n * 32: UXLEN is always 32\n * 64: UXLEN is always 64 * 3264: UXLEN can be changed (via mstatus.UXL) between 32 and 64", - "type": "integer", - "enum": [32, 64, 3264] - }, - "VSXLEN": { - "description": "XLENs supported in VS-mode. Can be one of:\n\n * 32: VSXLEN is always 32\n * 64: VSXLEN is always 64 * 3264: VSXLEN can be changed (via hstatus.VSXL) between 32 and 64", - "type": "integer", - "enum": [32, 64, 3264] - }, - "VUXLEN": { - "description": "XLENs supported in VU-mode. 
Can be one of:\n\n * 32: UXLEN is always 32\n * 64: VUXLEN is always 64 * 3264: VUXLEN can be changed (via vsstatus.SXL) between 32 and 64",
-        "type": "integer",
-        "enum": [32, 64, 3264]
-      },
-      "ASID_WIDTH": {
-        "type": "integer",
-        "maximum": 16,
-        "manimum": 0,
-        "description": "Number of implemented ASID bits"
-      },
-      "NUM_PMP_ENTRIES": {
-        "type": "integer",
-        "maximum": 64,
-        "minimum": 0,
-        "description": "Number of implemented PMP entries"
-      },
-      "ARCH_ID": {
-        "type": "integer",
-        "minimum": 0,
-        "maximum": 18446744073709551615,
-        "description": "Vendor-specific architecture ID presented in `marchid`"
-      },
-      "VENDOR_ID_BANK": {
-        "type": "integer",
-        "minimum": 0,
-        "maximum": 33554431,
-        "description": "Vendor JEDEC code, bank"
-      },
-      "VENDOR_ID_OFFSET": {
-        "type": "integer",
-        "minimum": 0,
-        "maximum": 127,
-        "description": "Vendor JEDEC code, offset"
-      },
-      "IMP_ID": {
-        "type": "integer",
-        "minimum": 0,
-        "maximum": 18446744073709551615,
-        "description": "Vendor-specific implementation ID present in `mimpid`"
-      },
-      "MISALIGNED_LDST": {
-        "type": "boolean",
-        "description": " whether or not the implementation supports misaligned loads and stores in main memory (not including atomics). Must be true when extension Zicclsm is implemented."
-      },
-      "MISALIGNED_AMO": {
-        "type": "boolean",
-        "description": " whether or not the implementation supports misaligned atomics.",
-        "default": false
-      },
-      "NUM_HPM_COUNTERS": {
-        "type": "integer",
-        "minimum": 0,
-        "maximum": 29,
-        "description": "Number of implemented programmable hardware counters (not including cycle, time, and instret)"
-      },
-      "HPM_EVENTS": {
-        "type": "array",
-        "items": {
-          "type": "integer",
-          "minimum": 0
+  "type": "object",
+  "title": "Architecture configuration",
+  "required": [
+    "$schema",
+    "kind",
+    "type",
+    "name",
+    "description"
+  ],
+  "allOf": [
+    {
+      "if": {
+        "properties": {
+          "type": { "const": "fully configured"}
+        }
+      },
+      "then": {
+        "required": [
+          "implemented_extensions",
+          "params"
+        ],
+        "properties": {
+          "params": {
+            "type": "object"
+          },
+          "mandatory_extensions": {
+            "type": "null"
          },
-        "uniqueItems": true,
-        "description": "List of defined HPM events that can be programmed into CSR[mhpmevent*]"
-      },
-      "COUNTINHIBIT_EN": {
-        "type": "array",
-        "description": "Indicates which counters can be disabled from mcountinhibit\n\n Formatted as a one-hot enable vector so that, for example, COUNTINHIBIT_EN[0] is for CY and COUNTINHIBIT_EN[3] is for HPM3",
-        "items": [
-          {
-            "type": "boolean"
+          "non_mandatory_extensions": {
+            "type": "null"
+          },
+          "prohibited_extensions": {
+            "type": "null"
+          },
+          "implemented_extensions": {
+            "description": "Extensions implemented by this architecture",
+            "type": "array",
+            "items": {
+              "type": "object",
+              "required": ["name", "version"],
+              "properties": {
+                "name": {
+                  "type": "string",
+                  "pattern": "^([A-WY]|([SXZ][a-z0-9]+))$",
+                  "description": "Extension name"
+                },
+                "version": {
+                  "oneOf": [
+                    {
+                      "type": "string",
+                      "description": "Extension version"
+                    },
+                    {
+                      "type": "number"
+                    }
+                  ]
+                }
+              },
+              "additionalProperties": false
+            }
+          }
+        }
+      }
+    },
+    {
+      "if": {
+        "properties": {
+          "type": { "const": "partially configured"}
+        }
+      },
+      "then": {
+        "anyOf": [
+          {
+            "required": [
+              "mandatory_extensions"
+            ]
+          },
+          {
+            "required": [
+              "params"
+            ]
+          }
+        ],
+        "properties": {
+          "params": {
+            "type": "object"
+          },
+          "mandatory_extensions": {
+            "description": "Extensions mandatory in this architecture",
+            "type": "array",
+            "items": {
+              "$ref":
"schema_defs.json#/$defs/extension_requirement" }, - { - "const": false, - "$comment": "There is no counter at index 1" + "default": { + "const": [] } - ], - "additionalItems": { - "type": "boolean" }, - "minItems": 32, - "maxItems": 32 - }, - "COUNTENABLE_EN": { - "type": "array", - "description": "Indicates which counters can be delegate from mcounteren\n\n Formatted as a one-hot enable vector so that, for example, COUNTENABLE_EN[0] is for CY and COUNTENABLE_EN[3] is for HPM3", - "items": [ - { - "type": "boolean" + "non_mandatory_extensions": { + "description": "Extensions that are not mandatory but are still _special_ in this architecture. This could mean different things depending on the context: for certificates or generated IP, this would correspond to _optional supported_, and extensions not in non_mandatory are not possible. For profiles, this corresponds to some type of _profile optional_, but extensions in non_mandatory are still possible.", + "type": "array", + "items": { + "$ref": "schema_defs.json#/$defs/extension_requirement" }, - { - "const": false, - "$comment": "There is no counter at index 1" + "default": { + "const": [] } - ], - "additionalItems": { - "type": "boolean" }, - "minItems": 32, - "maxItems": 32 - }, - "TRAP_ON_ILLEGAL_WLRL": { - "type": "boolean", - "default": true, - "description": "When true, writing an illegal value to a WLRL CSR field raises an Illegal Instruction exception.\nWhen false, writing an illegal value to a WLRL CSR field is ignored." - }, - - "REPORT_VA_IN_MTVAL_ON_BREAKPOINT": { - "type": "boolean", - "default": false, - "description": "When true, mtval is written with the virtual PC of the EBREAK instruction (same information as mepc).\nWhen false, mtval is written with 0 on an EBREAK instruction\n\nregardless, mtval is always written with a virtual PC when an external breakpoint is generated." 
- }, - "REPORT_VA_IN_MTVAL_ON_LOAD_MISALIGNED": { - "type": "boolean", - "default": false, - "description": "When true, mtval is written with the virtual address when a misaligned load causes a LoadAddressMisaligned exception.\nWhen false, mtval is written with 0" - }, - "REPORT_VA_IN_MTVAL_ON_STORE_AMO_MISALIGNED": { - "type": "boolean", - "default": false, - "description": "When true, mtval is written with the virtual address when a misaligned store or atomic causes a StoreAmoAddressMisaligned exception.\nWhen false, mtval is written with 0" - }, - "REPORT_VA_IN_MTVAL_ON_INSTRUCTION_MISALIGNED": { - "type": "boolean", - "default": false, - "description": "When true, mtval is written with the virtual address when a misaligned store or atomic causes a StoreAmoAddressMisaligned exception.\nWhen false, mtval is written with 0" - }, - "REPORT_VA_IN_MTVAL_ON_LOAD_ACCESS_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, mtval is written with the virtual address when a load causes a LoadAccessFault exception.\nWhen false, mtval is written with 0" - }, - "REPORT_VA_IN_MTVAL_ON_STORE_AMO_ACCESS_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, mtval is written with the virtual address when a store or atomic causes a StoreAmoAccessFault exception.\nWhen false, mtval is written with 0" - }, - "REPORT_VA_IN_MTVAL_ON_INSTRUCTION_ACCESS_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, mtval is written with the virtual address when a store or atomic causes a StoreAmoAccessFault exception.\nWhen false, mtval is written with 0" - }, - "REPORT_VA_IN_MTVAL_ON_LOAD_PAGE_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, mtval is written with the virtual address when a load causes a LoadPageFault exception.\nWhen false, mtval is written with 0" - }, - "REPORT_VA_IN_MTVAL_ON_STORE_AMO_PAGE_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, mtval is written with the virtual address when a store or atomic causes a StoreAmoPageFault exception.\nWhen false, mtval is written with 0" - }, - "REPORT_VA_IN_MTVAL_ON_INSTRUCTION_PAGE_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, mtval is written with the virtual address when a store or atomic causes a StoreAmoPageFault exception.\nWhen false, mtval is written with 0" - }, - "REPORT_ENCODING_IN_MTVAL_ON_ILLEGAL_INSTRUCTION": { - "type": "boolean", - "default": false, - "description": "When true, mtval is written with the faulting instruciton encoding when a IllegalInstruction exception occurs.\nWhen false, mtval is written with 0" - }, - "REPORT_CAUSE_IN_MTVAL_ON_SOFTWARE_CHECK": { - "type": "boolean", - "default": false, - "description": "When true, mtval is written with the casue when a SoftwareCheck exception occurs.\nWhen false, mtval is written with 0" - }, - "MTVAL_WIDTH": { - "type": "integer", - "maximum": 64, - "description": "Number of implemented bits in MTVAL. Must be >= largest virtual address size if a VA is ever written to mtval by hardware or if Sdext is implemented. 
Must also be able to hold the minimum of MXLEN or ILEN if encodings are reported on IllegalInstruction exceptions" - }, - - "REPORT_VA_IN_STVAL_ON_BREAKPOINT": { - "type": "boolean", - "default": false, - "description": "When true, stval is written with the virtual PC of the EBREAK instruction (same information as mepc).\nWhen false, stval is written with 0 on an EBREAK instruction\n\nregardless, stval is always written with a virtual PC when an external breakpoint is generated." - }, - "REPORT_VA_IN_STVAL_ON_LOAD_MISALIGNED": { - "type": "boolean", - "default": false, - "description": "When true, stval is written with the virtual address when a misaligned load causes a LoadAddressMisaligned exception.\nWhen false, stval is written with 0" - }, - "REPORT_VA_IN_STVAL_ON_STORE_AMO_MISALIGNED": { - "type": "boolean", - "default": false, - "description": "When true, stval is written with the virtual address when a misaligned store or atomic causes a StoreAmoAddressMisaligned exception.\nWhen false, stval is written with 0" - }, - "REPORT_VA_IN_STVAL_ON_INSTRUCTION_MISALIGNED": { - "type": "boolean", - "default": false, - "description": "When true, stval is written with the virtual address when a misaligned store or atomic causes a StoreAmoAddressMisaligned exception.\nWhen false, stval is written with 0" - }, - "REPORT_VA_IN_STVAL_ON_LOAD_ACCESS_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, stval is written with the virtual address when a load causes a LoadAccessFault exception.\nWhen false, stval is written with 0" - }, - "REPORT_VA_IN_STVAL_ON_STORE_AMO_ACCESS_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, stval is written with the virtual address when a store or atomic causes a StoreAmoAccessFault exception.\nWhen false, stval is written with 0" - }, - "REPORT_VA_IN_STVAL_ON_INSTRUCTION_ACCESS_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, stval is written with the virtual address when a store or atomic causes a StoreAmoAccessFault exception.\nWhen false, stval is written with 0" - }, - "REPORT_VA_IN_STVAL_ON_LOAD_PAGE_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, stval is written with the virtual address when a load causes a LoadPageFault exception.\nWhen false, stval is written with 0" - }, - "REPORT_VA_IN_STVAL_ON_STORE_AMO_PAGE_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, stval is written with the virtual address when a store or atomic causes a StoreAmoPageFault exception.\nWhen false, stval is written with 0" - }, - "REPORT_VA_IN_STVAL_ON_INSTRUCTION_PAGE_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, stval is written with the virtual address when a store or atomic causes a StoreAmoPageFault exception.\nWhen false, stval is written with 0" - }, - "REPORT_ENCODING_IN_STVAL_ON_ILLEGAL_INSTRUCTION": { - "type": "boolean", - "default": false, - "description": "When true, stval is written with the faulting instruction encoding when a IllegalInstruction exception occurs.\nWhen false, stval is written with 0" - }, - "REPORT_CAUSE_IN_STVAL_ON_SOFTWARE_CHECK": { - "type": "boolean", - "default": false, - "description": "When true, stval is written with the cause when a SoftwareCheck exception occurs.\nWhen false, stval is written with 0" - }, - "STVAL_WIDTH": { - "type": "integer", - "maximum": 64, - "description": "Number of implemented bits in STVAL. 
Must be >= largest virtual address size if a VA is ever written to stval by hardware or if Sdext is implemented. Must also be able to hold the minimum of SXLEN or ILEN if encodings are reported on IllegalInstruction exceptions" - }, - - "REPORT_VA_IN_VSTVAL_ON_BREAKPOINT": { - "type": "boolean", - "default": false, - "description": "When true, vstval is written with the virtual PC of the EBREAK instruction (same information as mepc).\nWhen false, vstval is written with 0 on an EBREAK instruction\n\nregardless, vstval is always written with a virtual PC when an external breakpoint is generated." - }, - "REPORT_VA_IN_VSTVAL_ON_LOAD_MISALIGNED": { - "type": "boolean", - "default": false, - "description": "When true, vstval is written with the virtual address when a misaligned load causes a LoadAddressMisaligned exception.\nWhen false, vstval is written with 0" - }, - "REPORT_VA_IN_VSTVAL_ON_STORE_AMO_MISALIGNED": { - "type": "boolean", - "default": false, - "description": "When true, vstval is written with the virtual address when a misaligned store or atomic causes a StoreAmoAddressMisaligned exception.\nWhen false, vstval is written with 0" - }, - "REPORT_VA_IN_VSTVAL_ON_INSTRUCTION_MISALIGNED": { - "type": "boolean", - "default": false, - "description": "When true, vstval is written with the virtual address when a misaligned store or atomic causes a StoreAmoAddressMisaligned exception.\nWhen false, vstval is written with 0" - }, - "REPORT_VA_IN_VSTVAL_ON_LOAD_ACCESS_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, vstval is written with the virtual address when a load causes a LoadAccessFault exception.\nWhen false, vstval is written with 0" - }, - "REPORT_VA_IN_VSTVAL_ON_STORE_AMO_ACCESS_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, vstval is written with the virtual address when a store or atomic causes a StoreAmoAccessFault exception.\nWhen false, vstval is written with 0" - }, - "REPORT_VA_IN_VSTVAL_ON_INSTRUCTION_ACCESS_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, vstval is written with the virtual address when a store or atomic causes a StoreAmoAccessFault exception.\nWhen false, vstval is written with 0" - }, - "REPORT_VA_IN_VSTVAL_ON_LOAD_PAGE_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, vstval is written with the virtual address when a load causes a LoadPageFault exception.\nWhen false, vstval is written with 0" - }, - "REPORT_VA_IN_VSTVAL_ON_STORE_AMO_PAGE_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, vstval is written with the virtual address when a store or atomic causes a StoreAmoPageFault exception.\nWhen false, vstval is written with 0" - }, - "REPORT_VA_IN_VSTVAL_ON_INSTRUCTION_PAGE_FAULT": { - "type": "boolean", - "default": false, - "description": "When true, vstval is written with the virtual address when a store or atomic causes a StoreAmoPageFault exception.\nWhen false, vstval is written with 0" - }, - "REPORT_ENCODING_IN_VSTVAL_ON_ILLEGAL_INSTRUCTION": { - "type": "boolean", - "default": false, - "description": "When true, vstval is written with the faulting instruciton encoding when a IllegalInstruction exception occurs.\nWhen false, vstval is written with 0" - }, - "REPORT_CAUSE_IN_VSTVAL_ON_SOFTWARE_CHECK": { - "type": "boolean", - "default": false, - "description": "When true, vstval is written with the casue when a SoftwareCheck exception occurs.\nWhen false, vstval is written with 0" - }, - - - 
- "CONFIG_PTR_ADDRESS": { - "type": "integer", - "minimum": 0, - "maximum": 18446744073709551615, - "description": "Physical address of the unified discovery configuration data structure (reported in `mconfigptr`)." - }, - "PMP_GRANULARITY": { - "type": "integer", - "minimum": 2, - "maximum": 66, - "description": "log2 of the smallest supported PMP region." - }, - "PMA_GRANULARITY": { - "type": "integer", - "minimum": 2, - "maximum": 66, - "description": "log2 of the smallest supported PMA region." - }, - "PHYS_ADDR_WIDTH": { - "type": "integer", - "minimum": 1, - "maximum": 36028797018963968, - "description": "Number of bits in the physical address space" - }, - "MUTABLE_MISA_A": { - "type": "boolean", - "default": false, - "description": "When A extension is supported, wether or not it can be dynamically disabled by writing the `misa.A` bit." - }, - "MUTABLE_MISA_B": { - "type": "boolean", - "default": false, - "description": "When B extension is supported, wether or not it can be dynamically disabled by writing the `misa.B` bit." - }, - "MUTABLE_MISA_C": { - "type": "boolean", - "default": false, - "description": "When C extension is supported, wether or not it can be dynamically disabled by writing the `misa.C` bit." - }, - "MUTABLE_MISA_D": { - "type": "boolean", - "default": false, - "description": "When D extension is supported, wether or not it can be dynamically disabled by writing the `misa.D` bit." - }, - "MUTABLE_MISA_F": { - "type": "boolean", - "default": false, - "description": "When F extension is supported, wether or not it can be dynamically disabled by writing the `misa.F` bit." - }, - "MUTABLE_MISA_H": { - "type": "boolean", - "default": false, - "description": "When H extension is supported, wether or not it can be dynamically disabled by writing the `misa.H` bit." - }, - "MUTABLE_MISA_M": { - "type": "boolean", - "default": false, - "description": "When M extension is supported, wether or not it can be dynamically disabled by writing the `misa.M` bit." - }, - "MUTABLE_MISA_S": { - "type": "boolean", - "default": false, - "description": "When S extension is supported, wether or not it can be dynamically disabled by writing the `misa.S` bit." - }, - "MUTABLE_MISA_U": { - "type": "boolean", - "default": false, - "description": "When U extension is supported, wether or not it can be dynamically disabled by writing the `misa.U` bit." - }, - "MUTABLE_MISA_V": { - "type": "boolean", - "default": false, - "description": "When V extension is supported, wether or not it can be dynamically disabled by writing the `misa.V` bit." - }, - "CACHE_BLOCK_SIZE": { - "type": "integer", - "minimum": 1, - "maximum": 65536, - "description": "Size, in bytes of a cache block (as seen by cache maintence operations)" - }, - "NUM_EXTERNAL_GUEST_INTERRUPTS": { - "type": "integer", - "minimum": 1, - "maximum": 63, - "description": "Number of supported virtualized guest external interrupts.\nCorresponds to the GEILEN parameter in RISC-V specifications." 
-      },
-      "LRSC_RESERVATION_STRATEGY": {
-        "type": "string",
-        "enum": [
-          "reserve naturally-aligned 64-byte region",
-          "reserve naturally-aligned 128-byte region",
-          "reserve exactly enough to cover the access",
-          "custom"
-        ],
-        "description": "Strategy used to handle reservation sets\n\n * 'reserve naturally-aligned 64-byte region': Always reserve the 64-byte block containing the LR/SC address\n * 'reserve naturally-aligned 128-byte region': Always reserve the 128-byte block containing the LR/SC address\n * 'reserve exactly enough to cover the access': Always reserve exactly the LR/SC access, and no more\n * 'custom': Custom behavior, leading to an 'unpredictable' call on any LR/SC"
+          "prohibited_extensions": {
+            "description": "Extensions explicitly prohibited in this architecture. Does *not* need to include extensions that are excluded because of a conflict-by-definition with a mandatory extension, as those will be calculated automatically",
+            "type": "array",
+            "items": {
+              "$ref": "schema_defs.json#/$defs/extension_requirement"
+            },
+            "default": {
+              "const": []
+            }
+          },
+          "implemented_extensions": {
+            "type": "null"
+          }
+        }
+      }
+    },
+    {
+      "if": {
+        "properties": {
+          "type": { "const": "unconfigured"}
+        }
+      },
+      "then": {
+        "mandatory_extensions": {
+          "type": "null"
        },
-      "LRSC_FAIL_ON_VA_SYNONYM": {
-        "type": "boolean",
-        "description": "whether or not an SC will fail if its VA does not match the VA of the prior LR, even if the physical address of the SC and LR are the same"
+        "non_mandatory_extensions": {
+          "type": "null"
        },
-      "LRSC_FAIL_ON_NON_EXACT_LRSC": {
-        "type": "boolean",
-        "description": "whether or not a Store Conditional fails if its physical address and size do not\nexactly match the physical address and size of the last Load Reserved in program order\n(independent of whether or not the SC is in the current reservation set)\n"
+        "prohibited_extensions": {
+          "type": "null"
        },
-      "LRSC_MISALIGNED_BEHAVIOR": {
-        "type": "string",
-        "enum": [
-          "always raise misaligned exception",
-          "always raise access fault",
-          "custom"
-        ],
-        "description": "what to do when an LR/SC address is misaligned:\n\n * 'always raise misaligned exception': self-explainitory\n * 'always raise access fault': self-explainitory\n * 'custom': Custom behavior; misaligned LR/SC may sometimes raise a misaligned exception and sometimes raise a access fault. 
Will lead to an 'unpredictable' call on any misaligned LR/SC access" + "params": { + "type": "null" } - }, - "additionalProperties": false + } } - }, - "type": "object", - "required": [ - "params", - "extensions" ], "properties": { - "params": { - "$ref": "#/$defs/params" + "type": { + "type": "string", + "description": "Type of the arch", + "enum": ["unconfigured", "partially configured", "fully configured"] }, - "extensions": { - "type": "array", - "description": "Extension names and versions", - "items": { - "type": "array", - "prefixItems": [ - { - "type": "string" - }, - { - "oneOf": [ - { - "type": "number" - }, - { - "type": "string", - "pattern": "^[0-9]+(\\.[0-9]+(\\.[0-9]+(-[a-fA-F0-9]+)?)?)?$" - } - ] - } - ], - "additionalItems": false - } - } - } + "$schema": { + "type": "string", + "format": "uri-reference", + "const": "config_schema.json#" + }, + "kind": { + "type": "string", + "const": "architecture configuration" + }, + "name": { + "type": "string" + }, + "description": { + "type": "string", + "description": "An asciidoc description of the configuration" + }, + "params": true, + "mandatory_extensions": true, + "non_mandatory_extensions": true, + "prohibited_extensions": true, + "implemented_extensions": true + }, + "additionalProperties": false } diff --git a/schemas/csr_schema.json b/schemas/csr_schema.json index 16ef9686c..c22c0bfdb 100644 --- a/schemas/csr_schema.json +++ b/schemas/csr_schema.json @@ -256,7 +256,7 @@ "type": "string", "description": "Function that returns the value of the CSR when read by software (i.e., a Zicsr instruction). If not specified, the value last written (through hw_write) is returned." }, - "__source": { + "$source": { "description": "Path to the source file this definition came from; used by downstream tooling -- not expected to be in handwritten files", "type": "string" } diff --git a/schemas/ext_schema.json b/schemas/ext_schema.json index 8c281c4e6..7be4b12e6 100644 --- a/schemas/ext_schema.json +++ b/schemas/ext_schema.json @@ -67,34 +67,10 @@ }, "company": { "description": "The company that developed this extension", - "type": "object", - "properties": { - "name": { - "type": "string", - "description": "Name of the company that developed this extension. Should be \"RISC-V International\" for standard extensions" - }, - "url": { - "type": "string", - "format": "uri", - "description": "Website of the company that developed this extension. 
Should be \"https://riscv.org\" for standard extensions" - } - } + "$ref": "schema_defs.json#/$defs/company" }, "doc_license": { - "description": "License that applies to the textual documentation for this extension", - "type": "object", - "properties": { - "name": { - "type": "string", - "description": "License name" - }, - "url": { - "type": "string", - "format": "uri", - "description": "Link to license text" - } - }, - "additionalProperties": false + "$ref": "schema_defs.json#/$defs/license" }, "type": { "enum": ["unprivileged", "privileged"] }, "conflicts": { @@ -278,7 +254,7 @@ }, "additionalProperties": false }, - "__source": { + "$source": { "type": "string", "description": "Source file where this extension was defined" } diff --git a/schemas/implemented_exts_schema.json b/schemas/implemented_exts_schema.json deleted file mode 100644 index cf4d6dd29..000000000 --- a/schemas/implemented_exts_schema.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - - "type": "object", - "properties": { - "implemented_extensions": { - "type": "array", - "items": { - "$ref": "schema_defs.json#/$defs/extension_name_and_version" - } - } - }, - "additionalProperties": false -} diff --git a/schemas/inst_schema.json b/schemas/inst_schema.json index ac1c0082d..798319c50 100644 --- a/schemas/inst_schema.json +++ b/schemas/inst_schema.json @@ -75,6 +75,11 @@ "name": { "type": "string" }, + "$child_of": { + "type": "string", + "pattern": "^common/inst_variable_types\\.yaml#/[a-zA-Z0-9_]+", + "description": "Cookie crumb of the reference to variable metadata" + }, "location": { "$ref": "#/$defs/field_location" }, @@ -270,7 +275,7 @@ "additionalProperties": false } }, - "__source": { + "$source": { "description": "Path to the source file. 
Used by downstream tooling; not expected to be found in handwritten files", "type": "string" } diff --git a/schemas/manual_schema.json b/schemas/manual_schema.json new file mode 100644 index 000000000..1b3de0aa0 --- /dev/null +++ b/schemas/manual_schema.json @@ -0,0 +1,42 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + + "type": "object", + "required": ["$schema", "kind", "name"], + "properties": { + "$schema": { + "type": "string", + "const": "manual_schema.json#" + }, + "kind": { + "type": "string", + "const": "manual" + }, + "name": { + "type": "string", + "description": "Name (database key) of this manual" + }, + "marketing_name": { + "type": "string", + "description": "The publicly displayed manual name" + }, + "state": { + "$ref": "schema_defs.json#/$defs/spec_state", + "description": "State of this version" + }, + "url": { + "type": "string", + "format": "uri", + "description": "URL to the repository" + }, + "license": { + "$ref": "schema_defs.json#/$defs/license" + }, + "$source": { + "type": "string", + "format": "uri-reference", + "description": "Relative (from arch/) path to the original source file" + } + }, + "additionalProperties": false +} \ No newline at end of file diff --git a/schemas/manual_version_schema.json b/schemas/manual_version_schema.json index 9557413ff..5ea6e7f92 100644 --- a/schemas/manual_version_schema.json +++ b/schemas/manual_version_schema.json @@ -50,15 +50,30 @@ }, "type": "object", - "required": ["manual", "version", "name", "marketing_version", "state", "volumes"], + "required": ["$schema", "kind", "manual", "version", "name", "marketing_version", "state", "volumes"], "properties": { + "$schema": { + "type": "string", + "const": "manual_version_schema.json#" + }, + "kind": { + "type": "string", + "const": "manual version" + }, "name": { "type": "string", "description": "Name (database key) of this version" }, "manual": { - "type": "string", - "description": "Name (database key) of the manual this version belongs to" + "type": "object", + "properties": { + "$ref": { + "type": "string", + "format": "uri-reference", + "pattern": "^manual/.*\\.yaml#$", + "description": "Pointer to the manual" + } + } }, "version": { "$ref": "schema_defs.json#/$defs/semantic_version", @@ -92,6 +107,11 @@ "$ref": "#/$defs/volume" }, "description": "List of volumes in this version" + }, + "$source": { + "type": "string", + "format": "uri-reference", + "description": "Relative (from arch/) path to the original source file" } }, "additionalProperties": false diff --git a/schemas/profile_class_schema.json b/schemas/profile_class_schema.json new file mode 100644 index 000000000..134f40a56 --- /dev/null +++ b/schemas/profile_class_schema.json @@ -0,0 +1,48 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + + "type": "object", + "required": ["$schema", "kind", "name"], + "properties": { + "$schema": { + "type": "string", + "const": "profile_class_schema.json#" + }, + "kind": { + "type": "string", + "const": "profile class" + }, + "name": { + "type": "string", + "description": "Name (database key) of this Profile Class" + }, + "marketing_name": { + "type": "string", + "description": "The publicly displayed profile class name" + }, + "introduction": { + "type": "string", + "description": "Asciidoc introduction to this Profile Class" + }, + "description": { + "type": "string", + "description": "Prose introduction, in asciidoc" + }, + "naming_scheme": { + "type": "string", + "description": "Commentary on how profile releases in the class are named" + }, + 
"company": { + "$ref": "schema_defs.json#/$defs/company" + }, + "doc_license": { + "$ref": "schema_defs.json#/$defs/license" + }, + "$source": { + "type": "string", + "format": "uri-refencence", + "description": "Realtive (from arch/) path to the original YAML file" + } + }, + "additionalProperties": false +} \ No newline at end of file diff --git a/schemas/profile_schema.json b/schemas/profile_schema.json new file mode 100644 index 000000000..27cc3b985 --- /dev/null +++ b/schemas/profile_schema.json @@ -0,0 +1,20 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + + "type": "object", + "required": ["$schema", "kind", "name"], + "properties": { + "$schema": { + "type": "string", + "const": "profile_schema.json#" + }, + "kind": { + "type": "string", + "const": "profile" + }, + "name": { + "type": "string", + "description": "Name (database key) of this Profile" + } + } +} \ No newline at end of file diff --git a/schemas/schema_defs.json b/schemas/schema_defs.json index 57c886132..0f1afdd95 100644 --- a/schemas/schema_defs.json +++ b/schemas/schema_defs.json @@ -4,7 +4,7 @@ "title": "Common patterns used by all schemas", "$defs": { - "__source": { + "$source": { "type": "string", "format": "uri-reference", "description": "Path to the source file containing this object" @@ -54,6 +54,46 @@ "nonstandard-released" ] }, + "license": { + "description": "License that applies to the textual documentation for this extension", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "License name" + }, + "id": { + "type": "string", + "description": "License identifier" + }, + "url": { + "type": "string", + "format": "uri", + "description": "Link to license text" + }, + "text_url": { + "type": "string", + "format": "uri", + "description": "Link to license text" + } + }, + "additionalProperties": false + }, + "company": { + "description": "A company", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Name of the company. Should be \"RISC-V International\" for standard extensions" + }, + "url": { + "type": "string", + "format": "uri", + "description": "Website of the company. Should be \"https://riscv.org\" for standard extensions" + } + } + }, "extension_presence": { "oneOf": [ { @@ -137,7 +177,6 @@ "additionalProperties": false } ] - }, "requires_entry": { "oneOf": [